From 964308702061716873deaf39cdabfddbef8615c2 Mon Sep 17 00:00:00 2001 From: janraj Date: Fri, 16 Dec 2016 14:31:56 -0800 Subject: [PATCH 001/192] Updating the version for the debugger release. --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index f586a86f10..89c9e82ae2 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "name": "csharp", "publisher": "ms-vscode", - "version": "1.6.0-beta5", + "version": "1.6.0-beta6", "description": "C# for Visual Studio Code (powered by OmniSharp).", "displayName": "C#", "author": "Microsoft Corporation", From 8f1ec2190d95cd3c727f89a0585151b8ce89374d Mon Sep 17 00:00:00 2001 From: Ivan Zlatev Date: Sat, 17 Dec 2016 19:00:29 +0000 Subject: [PATCH 002/192] Remove C# grammar test suite and npm dependencies. --- README.md | 6 +- package.json | 3 +- test/syntaxes/class.tests.ts | 169 --------------------------- test/syntaxes/event.tests.ts | 45 ------- test/syntaxes/field.tests.ts | 134 --------------------- test/syntaxes/namespace.tests.ts | 78 ------------- test/syntaxes/property.tests.ts | 133 --------------------- test/syntaxes/string.tests.ts | 123 ------------------- test/syntaxes/utils/tokenizer.ts | 108 ----------------- test/syntaxes/utils/tokenizerUtil.ts | 10 -- 10 files changed, 3 insertions(+), 806 deletions(-) delete mode 100644 test/syntaxes/class.tests.ts delete mode 100644 test/syntaxes/event.tests.ts delete mode 100644 test/syntaxes/field.tests.ts delete mode 100644 test/syntaxes/namespace.tests.ts delete mode 100644 test/syntaxes/property.tests.ts delete mode 100644 test/syntaxes/string.tests.ts delete mode 100644 test/syntaxes/utils/tokenizer.ts delete mode 100644 test/syntaxes/utils/tokenizerUtil.ts diff --git a/README.md b/README.md index f9efad0a7f..74b1e71f55 100644 --- a/README.md +++ b/README.md @@ -27,7 +27,7 @@ The C# extension is powered by [OmniSharp](https://github.com/OmniSharp/omnishar * Ensure diagnostics are cleared in files 
when they are no longer needed. ([#858](https://github.com/OmniSharp/omnisharp-vscode/issues/858)) * Enqueue requests for diagnostics in visible editors when the extension starts up. ([#843](https://github.com/OmniSharp/omnisharp-vscode/issues/843)) * Provide fallback URLs for debugger downloads. ([#930](https://github.com/OmniSharp/omnisharp-vscode/issues/930)) -* Properly require .NET Framework 4.6 in the OmniSharp.exe.config file to ensure that the user is displayed a dialog on Windows machines that don't have .NET Framework 4.6 installed. ([#937](https://github.com/OmniSharp/omnisharp-vscode/issues/937)) +* Properly require .NET Framework 4.6 in the OmniSharp.exe.config file to ensure that the user is displayed a dialog on Windows machines that don't have .NET Framework 4.6 installed. ([#937](https://github.com/OmniSharp/omnisharp-vscode/issues/937)) * Fix issue with installing on non-English installations of Windows. ([#938](https://github.com/OmniSharp/omnisharp-vscode/issues/938)) * Display platform information when acquiring runtime dependencies. ([#948](https://github.com/OmniSharp/omnisharp-vscode/issues/948)) @@ -75,7 +75,7 @@ Several new settings have been added: #### Performance -* Major improvements have been made to editor performance. The communication with the OmniSharp server has been rewritten to allow long-running operations (such as gathering all errors and warnings) to queue while high priority operations (such as text buffer changes) run serially. ([#902](https://github.com/OmniSharp/omnisharp-vscode/pull/902)) _(Thanks to [@david-driscoll](https://github.com/david-driscoll) for his help with this change!)_ +* Major improvements have been made to editor performance. The communication with the OmniSharp server has been rewritten to allow long-running operations (such as gathering all errors and warnings) to queue while high priority operations (such as text buffer changes) run serially. 
([#902](https://github.com/OmniSharp/omnisharp-vscode/pull/902)) _(Thanks to [@david-driscoll](https://github.com/david-driscoll) for his help with this change!)_ #### Other Improvements @@ -111,8 +111,6 @@ First install: * Node.js (newer than 4.3.1) * Npm (newer 2.14.12) -In case you get a *node-gyp* error [follow the instrutions here](https://github.com/nodejs/node-gyp/blob/master/README.md) to fix it. The *vscode-textmate* package pulls in a native node dependency and those instructions will set up the node build tool which deals with those. - To **run and develop** do the following: * Run `npm i` diff --git a/package.json b/package.json index 9bd20ee809..6f1030ef59 100644 --- a/package.json +++ b/package.json @@ -48,8 +48,7 @@ "typescript": "^2.0.3", "vscode": "^0.11.13", "vsce": "^1.7.0", - "chai": "^3.5.0", - "vscode-textmate": "^2.1.1" + "chai": "^3.5.0" }, "runtimeDependencies": [ { diff --git a/test/syntaxes/class.tests.ts b/test/syntaxes/class.tests.ts deleted file mode 100644 index cd95f9dd99..0000000000 --- a/test/syntaxes/class.tests.ts +++ /dev/null @@ -1,169 +0,0 @@ -import { should } from 'chai'; -import { Tokens, Token } from './utils/tokenizer'; -import { TokenizerUtil } from'./utils/tokenizerUtil'; - -describe("Grammar", function() { - before(function() { - should(); - }); - - describe("Class", function() { - it("class keyword and storage modifiers", function() { - -const input = ` -namespace TestNamespace -{ - public class PublicClass { } - - class DefaultClass { } - - internal class InternalClass { } - - static class DefaultStaticClass { } - - public static class PublicStaticClass { } - - sealed class DefaultSealedClass { } - - public sealed class PublicSealedClass { } - - public abstract class PublicAbstractClass { } - - abstract class DefaultAbstractClass { } -}`; - let tokens: Token[] = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.StorageModifierKeyword("public", 4, 5)); - tokens.should.contain(Tokens.ClassKeyword("class", 
4, 24)); - tokens.should.contain(Tokens.ClassIdentifier("PublicClass", 4, 30)); - - tokens.should.contain(Tokens.ClassKeyword("class", 6, 24)); - tokens.should.contain(Tokens.ClassIdentifier("DefaultClass", 6, 30)); - - tokens.should.contain(Tokens.StorageModifierKeyword("internal", 8, 5)); - tokens.should.contain(Tokens.ClassKeyword("class", 8, 24)); - tokens.should.contain(Tokens.ClassIdentifier("InternalClass", 8, 30)); - - tokens.should.contain(Tokens.StorageModifierKeyword("static", 10, 15)); - tokens.should.contain(Tokens.ClassKeyword("class", 10, 24)); - tokens.should.contain(Tokens.ClassIdentifier("DefaultStaticClass", 10, 30)); - - tokens.should.contain(Tokens.StorageModifierKeyword("public", 12, 5)); - tokens.should.contain(Tokens.StorageModifierKeyword("static", 12, 15)); - tokens.should.contain(Tokens.ClassKeyword("class", 12, 24)); - tokens.should.contain(Tokens.ClassIdentifier("PublicStaticClass", 12, 30)); - - tokens.should.contain(Tokens.StorageModifierKeyword("sealed", 14, 15)); - tokens.should.contain(Tokens.ClassKeyword("class", 14, 24)); - tokens.should.contain(Tokens.ClassIdentifier("DefaultSealedClass", 14, 30)); - - tokens.should.contain(Tokens.StorageModifierKeyword("public", 16, 5)); - tokens.should.contain(Tokens.StorageModifierKeyword("sealed", 16, 15)); - tokens.should.contain(Tokens.ClassKeyword("class", 16, 24)); - tokens.should.contain(Tokens.ClassIdentifier("PublicSealedClass", 16, 30)); - - tokens.should.contain(Tokens.StorageModifierKeyword("public", 18, 5)); - tokens.should.contain(Tokens.StorageModifierKeyword("abstract", 18, 15)); - tokens.should.contain(Tokens.ClassKeyword("class", 18, 24)); - tokens.should.contain(Tokens.ClassIdentifier("PublicAbstractClass", 18, 30)); - - tokens.should.contain(Tokens.StorageModifierKeyword("abstract", 20, 15)); - tokens.should.contain(Tokens.ClassKeyword("class", 20, 24)); - tokens.should.contain(Tokens.ClassIdentifier("DefaultAbstractClass", 20, 30)); - - }); - - it("generics in identifier", 
function () { - - const input = ` -namespace TestNamespace -{ - class Dictionary> { } -}`; - let tokens: Token[] = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.ClassKeyword("class", 4, 5)); - tokens.should.contain(Tokens.ClassIdentifier("Dictionary>", 4, 11)); - }); - - it("inheritance", function() { - -const input = ` -namespace TestNamespace -{ - class PublicClass : IInterface, IInterfaceTwo { } - class PublicClass : Root.IInterface, Something.IInterfaceTwo { } - class PublicClass : Dictionary>, IMap> { } -}`; - let tokens: Token[] = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.ClassKeyword("class", 4, 5)); - tokens.should.contain(Tokens.ClassIdentifier("PublicClass", 4, 11)); - tokens.should.contain(Tokens.Type("IInterface", 4, 28)); - tokens.should.contain(Tokens.Type("IInterfaceTwo", 4, 43)); - - tokens.should.contain(Tokens.ClassKeyword("class", 5, 5)); - tokens.should.contain(Tokens.ClassIdentifier("PublicClass", 5, 11)); - tokens.should.contain(Tokens.Type("Root.IInterface", 5, 28)); - tokens.should.contain(Tokens.Type("Something.IInterfaceTwo", 5, 63)); - - tokens.should.contain(Tokens.Type("Dictionary>", 6, 28)); - tokens.should.contain(Tokens.Type("IMap>", 6, 71)); - }); - - it("generic constraints", function() { - -const input = ` -namespace TestNamespace -{ - class PublicClass where T : ISomething { } - class PublicClass : Dictionary[]>, ISomething where T : ICar, new() where X : struct { } -}`; - let tokens: Token[] = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.ClassKeyword("class", 4, 5)); - tokens.should.contain(Tokens.ClassIdentifier("PublicClass", 4, 11)); - tokens.should.contain(Tokens.Keyword("where", 4, 26)); - tokens.should.contain(Tokens.Type("T", 4, 32)); - tokens.should.contain(Tokens.Type("ISomething", 4, 36)); - - tokens.should.contain(Tokens.ClassKeyword("class", 5, 5)); - tokens.should.contain(Tokens.ClassIdentifier("PublicClass", 5, 11)); - 
tokens.should.contain(Tokens.Type("Dictionary[]>", 5, 31)); - tokens.should.contain(Tokens.Type("ISomething", 5, 62)); - tokens.should.contain(Tokens.Keyword("where", 5, 73)); - tokens.should.contain(Tokens.Type("T", 5, 79)); - tokens.should.contain(Tokens.Type("ICar", 5, 83)); - tokens.should.contain(Tokens.Keyword("new", 5, 89)); - tokens.should.contain(Tokens.Keyword("where", 5, 95)); - tokens.should.contain(Tokens.Type("X", 5, 101)); - tokens.should.contain(Tokens.Keyword("struct", 5, 105)); - - }); - - it("nested class", function() { - -const input = ` -namespace TestNamespace -{ - class Klass - { - public class Nested - { - - } - } -}`; - let tokens: Token[] = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.ClassKeyword("class", 4, 5)); - tokens.should.contain(Tokens.ClassIdentifier("Klass", 4, 11)); - - tokens.should.contain(Tokens.StorageModifierKeyword("public", 6, 9)); - tokens.should.contain(Tokens.ClassKeyword("class", 6, 16)); - tokens.should.contain(Tokens.ClassIdentifier("Nested", 6, 22)); - }); - }); -}); - - diff --git a/test/syntaxes/event.tests.ts b/test/syntaxes/event.tests.ts deleted file mode 100644 index 38a849fb99..0000000000 --- a/test/syntaxes/event.tests.ts +++ /dev/null @@ -1,45 +0,0 @@ -import { should } from 'chai'; -import { Tokens, Token } from './utils/tokenizer'; -import { TokenizerUtil } from'./utils/tokenizerUtil'; - -describe("Grammar", function() { - before(function() { - should(); - }); - - describe("Event", function() { - it("declaration", function() { - -const input = ` -public class Tester -{ - public event Type Event; -}`; - - let tokens: Token[] = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.StorageModifierKeyword("public", 4, 5)); - tokens.should.contain(Tokens.StorageModifierKeyword("event", 4, 12)); - tokens.should.contain(Tokens.Type("Type", 4, 18)); - tokens.should.contain(Tokens.EventIdentifier("Event", 4, 23)); - }); - - it("generic", function () { - - const input = ` -public 
class Tester -{ - public event EventHandler, Dictionary> Event; -}`; - - let tokens: Token[] = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.StorageModifierKeyword("public", 4, 5)); - tokens.should.contain(Tokens.StorageModifierKeyword("event", 4, 12)); - tokens.should.contain(Tokens.Type("EventHandler, Dictionary>", 4, 18)); - tokens.should.contain(Tokens.EventIdentifier("Event", 4, 58)); - }); - }); -}); - - diff --git a/test/syntaxes/field.tests.ts b/test/syntaxes/field.tests.ts deleted file mode 100644 index dee1b2bd7b..0000000000 --- a/test/syntaxes/field.tests.ts +++ /dev/null @@ -1,134 +0,0 @@ -import { should } from 'chai'; -import { Tokens, Token } from './utils/tokenizer'; -import { TokenizerUtil } from'./utils/tokenizerUtil'; - -describe("Grammar", function() { - before(function() { - should(); - }); - - describe("Field", function() { - it("declaration", function() { - -const input = ` -public class Tester -{ - private List _field; - private List field; - private List field123; -}`; - - let tokens: Token[] = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.StorageModifierKeyword("private", 4, 5)); - tokens.should.contain(Tokens.Type("List", 4, 13)); - tokens.should.contain(Tokens.FieldIdentifier("_field", 4, 18)); - - tokens.should.contain(Tokens.FieldIdentifier("field", 5, 18)); - tokens.should.contain(Tokens.FieldIdentifier("field123", 6, 18)); - }); - - it("generic", function () { - - const input = ` -public class Tester -{ - private Dictionary< List, Dictionary> _field; -}`; - - let tokens: Token[] = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.StorageModifierKeyword("private", 4, 5)); - tokens.should.contain(Tokens.Type("Dictionary< List, Dictionary>", 4, 13)); - tokens.should.contain(Tokens.FieldIdentifier("_field", 4, 52)); - }); - - - it("modifiers", function() { - -const input = ` -public class Tester -{ - private static readonly List _field; - readonly string _field2; - string _field3; -}`; - 
- let tokens: Token[] = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.StorageModifierKeyword("private", 4, 5)); - tokens.should.contain(Tokens.StorageModifierKeyword("static", 4, 13)); - tokens.should.contain(Tokens.StorageModifierKeyword("readonly", 4, 20)); - tokens.should.contain(Tokens.Type("List", 4, 29)); - tokens.should.contain(Tokens.FieldIdentifier("_field", 4, 34)); - - tokens.should.contain(Tokens.FieldIdentifier("_field2", 5, 21)); - - tokens.should.contain(Tokens.FieldIdentifier("_field3", 6, 12)); - }); - - it("types", function() { - -const input = ` -public class Tester -{ - string field123; - string[] field123; -}`; - - let tokens: Token[] = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.Type("string", 4, 5)); - tokens.should.contain(Tokens.FieldIdentifier("field123", 4, 12)); - - tokens.should.contain(Tokens.Type("string[]", 5, 5)); - tokens.should.contain(Tokens.FieldIdentifier("field123", 5, 14)); - }); - - it("assignment", function() { - -const input = ` -public class Tester -{ - private string field = "hello"; - const bool field = true; -}`; - - let tokens: Token[] = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.StorageModifierKeyword("private", 4, 5)); - tokens.should.contain(Tokens.Type("string", 4, 13)); - tokens.should.contain(Tokens.FieldIdentifier("field", 4, 20)); - tokens.should.contain(Tokens.StringDoubleQuoted("hello", 4, 29)); - - tokens.should.contain(Tokens.StorageModifierKeyword("const", 5, 5)); - tokens.should.contain(Tokens.Type("bool", 5, 13)); - tokens.should.contain(Tokens.FieldIdentifier("field", 5, 20)); - tokens.should.contain(Tokens.LanguageConstant("true", 5, 28)); - }); - - it("expression body", function() { - -const input = ` -public class Tester -{ - private string field => "hello"; - const bool field => true; -}`; - - let tokens: Token[] = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.StorageModifierKeyword("private", 4, 5)); - 
tokens.should.contain(Tokens.Type("string", 4, 13)); - tokens.should.contain(Tokens.FieldIdentifier("field", 4, 20)); - tokens.should.contain(Tokens.StringDoubleQuoted("hello", 4, 30)); - - tokens.should.contain(Tokens.StorageModifierKeyword("const", 5, 5)); - tokens.should.contain(Tokens.Type("bool", 5, 13)); - tokens.should.contain(Tokens.FieldIdentifier("field", 5, 20)); - tokens.should.contain(Tokens.LanguageConstant("true", 5, 29)); - }); - }); -}); - - diff --git a/test/syntaxes/namespace.tests.ts b/test/syntaxes/namespace.tests.ts deleted file mode 100644 index 30b3ca341d..0000000000 --- a/test/syntaxes/namespace.tests.ts +++ /dev/null @@ -1,78 +0,0 @@ -import { should } from 'chai'; -import { Tokens, Token } from './utils/tokenizer'; -import { TokenizerUtil } from'./utils/tokenizerUtil'; - -describe("Grammar", function() { - before(function () { - should(); - }); - - describe("Namespace", function() { - it("has a namespace keyword and a name", function() { - -const input = ` -namespace TestNamespace -{ -}`; - let tokens: Token[] = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.NamespaceKeyword("namespace", 2, 1)); - tokens.should.contain(Tokens.NamespaceIdentifier("TestNamespace", 2, 11)); - }); - - it("can be nested", function() { - -const input = ` -namespace TestNamespace -{ - namespace NestedNamespace { - - } -}`; - let tokens: Token[] = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.NamespaceKeyword("namespace", 2, 1)); - tokens.should.contain(Tokens.NamespaceIdentifier("TestNamespace", 2, 11)); - - tokens.should.contain(Tokens.NamespaceKeyword("namespace", 4, 5)); - tokens.should.contain(Tokens.NamespaceIdentifier("NestedNamespace", 4, 15)); - }); - - it("can contain using statements", function() { - -const input = ` -using UsineOne; -using one = UsineOne.Something; - -namespace TestNamespace -{ - using UsingTwo; - using two = UsineOne.Something; - - namespace NestedNamespace - { - using UsingThree; - using three = 
UsineOne.Something; - } -}`; - let tokens: Token[] = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.UsingKeyword("using", 2, 1)); - tokens.should.contain(Tokens.UsingKeyword("using", 3, 1)); - - tokens.should.contain(Tokens.NamespaceKeyword("namespace", 5, 1)); - tokens.should.contain(Tokens.NamespaceIdentifier("TestNamespace", 5, 11)); - - tokens.should.contain(Tokens.UsingKeyword("using", 7, 5)); - tokens.should.contain(Tokens.UsingKeyword("using", 8, 5)); - - tokens.should.contain(Tokens.NamespaceKeyword("namespace", 10, 5)); - tokens.should.contain(Tokens.NamespaceIdentifier("NestedNamespace", 10, 15)); - - tokens.should.contain(Tokens.UsingKeyword("using", 12, 9)); - tokens.should.contain(Tokens.UsingKeyword("using", 12, 9)); - }); - }); -}); - - diff --git a/test/syntaxes/property.tests.ts b/test/syntaxes/property.tests.ts deleted file mode 100644 index a7ad89745d..0000000000 --- a/test/syntaxes/property.tests.ts +++ /dev/null @@ -1,133 +0,0 @@ -import { should } from 'chai'; -import { Tokens, Token } from './utils/tokenizer'; -import { TokenizerUtil } from'./utils/tokenizerUtil'; - -describe("Grammar", function() { - before(function() { - should(); - }); - - describe("Property", function() { - it("declaration", function() { - -const input = ` -class Tester -{ - public IBooom Property - { - get { return null; } - set { something = value; } - } -}`; - let tokens: Token[] = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.StorageModifierKeyword("public", 4, 5)); - tokens.should.contain(Tokens.Type("IBooom", 4, 12)); - tokens.should.contain(Tokens.PropertyIdentifier("Property", 4, 19)); - tokens.should.contain(Tokens.Keyword("get", 6, 9)); - tokens.should.contain(Tokens.Keyword("set", 7, 9)); - }); - - it("declaration single line", function() { - -const input = ` -class Tester -{ - public IBooom Property { get { return null; } private set { something = value; } } -}`; - let tokens: Token[] = TokenizerUtil.tokenize(input); - - 
tokens.should.contain(Tokens.StorageModifierKeyword("public", 4, 5)); - tokens.should.contain(Tokens.Type("IBooom", 4, 12)); - tokens.should.contain(Tokens.PropertyIdentifier("Property", 4, 19)); - tokens.should.contain(Tokens.Keyword("get", 4, 30)); - tokens.should.contain(Tokens.StorageModifierKeyword("private", 4, 51)); - tokens.should.contain(Tokens.Keyword("set", 4, 59)); - }); - - - it("declaration without modifiers", function() { - -const input = ` -class Tester -{ - IBooom Property {get; set;} -}`; - let tokens: Token[] = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.Type("IBooom", 4, 5)); - tokens.should.contain(Tokens.PropertyIdentifier("Property", 4, 12)); - }); - - it("auto-property single line", function() { - -const input = ` -class Tester -{ - public IBooom Property { get; set; } -}`; - let tokens: Token[] = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.StorageModifierKeyword("public", 4, 5)); - tokens.should.contain(Tokens.Type("IBooom", 4, 12)); - tokens.should.contain(Tokens.PropertyIdentifier("Property", 4, 19)); - tokens.should.contain(Tokens.Keyword("get", 4, 30)); - tokens.should.contain(Tokens.Keyword("set", 4, 35)); - }); - - it("auto-property", function() { - -const input = ` -class Tester -{ - public IBooom Property - { - get; - set; - } -}`; - let tokens: Token[] = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.StorageModifierKeyword("public", 4, 5)); - tokens.should.contain(Tokens.Type("IBooom", 4, 12)); - tokens.should.contain(Tokens.PropertyIdentifier("Property", 4, 19)); - tokens.should.contain(Tokens.Keyword("get", 6, 9)); - tokens.should.contain(Tokens.Keyword("set", 7, 9)); - }); - - it("generic auto-property", function() { - -const input = ` -class Tester -{ - public Dictionary[]> Property { get; set; } -}`; - let tokens: Token[] = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.StorageModifierKeyword("public", 4, 5)); - 
tokens.should.contain(Tokens.Type("Dictionary[]>", 4, 12)); - tokens.should.contain(Tokens.PropertyIdentifier("Property", 4, 42)); - tokens.should.contain(Tokens.Keyword("get", 4, 53)); - tokens.should.contain(Tokens.Keyword("set", 4, 58)); - }); - - it("auto-property initializer", function() { - -const input = ` -class Tester -{ - public Dictionary[]> Property { get; } = new Dictionary[]>(); -}`; - - let tokens: Token[] = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.StorageModifierKeyword("public", 4, 5)); - tokens.should.contain(Tokens.Type("Dictionary[]>", 4, 12)); - tokens.should.contain(Tokens.PropertyIdentifier("Property", 4, 42)); - tokens.should.contain(Tokens.Keyword("get", 4, 53)); - tokens.should.contain(Tokens.StorageModifierKeyword("new", 4, 62)); - }); - }); -}); - - diff --git a/test/syntaxes/string.tests.ts b/test/syntaxes/string.tests.ts deleted file mode 100644 index fe58bb6f7d..0000000000 --- a/test/syntaxes/string.tests.ts +++ /dev/null @@ -1,123 +0,0 @@ -import { should } from 'chai'; -import { Tokens, Token } from './utils/tokenizer'; -import { TokenizerUtil } from'./utils/tokenizerUtil'; - -describe("Grammar", function() { - before(function() { - should(); - }); - - describe("String interpolated", function() { - it("non-verbatim", function() { - -const input = ` -public class Tester -{ - string test = $"hello {one} world {two}!"; -}`; - - let tokens: Token[] = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.StringStart('$"', 4, 19)); - tokens.should.contain(Tokens.StringDoubleQuoted("hello ", 4, 21)); - tokens.should.contain(Tokens.StringInterpolatedExpression("one", 4, 28)); - tokens.should.contain(Tokens.StringDoubleQuoted(" world ", 4, 32)); - tokens.should.contain(Tokens.StringInterpolatedExpression("two", 4, 40)); - tokens.should.contain(Tokens.StringDoubleQuoted("!", 4, 44)); - tokens.should.contain(Tokens.StringEnd('"', 4, 45)); - }); - - - it("non-verbatim without expressions single-line", function() 
{ - -const input = ` -public class Tester -{ - string test = $"hello world!"; -}`; - - let tokens: Token[] = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.StringStart('$"', 4, 19)); - tokens.should.contain(Tokens.StringDoubleQuoted("hello world!", 4, 21)); - tokens.should.contain(Tokens.StringEnd('"', 4, 33)); - }); - - it("non-verbatim multi-line", function() { - -const input = ` -public class Tester -{ - string test = $"hello -world!"; -}`; - - let tokens: Token[] = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.StringStart('$"', 4, 19)); - tokens.should.contain(Tokens.StringDoubleQuoted("hello", 4, 21)); - tokens.should.not.contain(Tokens.StringDoubleQuoted("world!", 5, 1)); - tokens.should.not.contain(Tokens.StringEnd('"', 5, 7)); - }); - - - it("verbatim single-line", function() { - -const input = ` -public class Tester -{ - string test = $@"hello {one} world {two}!"; -}`; - - let tokens: Token[] = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.StringStart('$@"', 4, 19)); - tokens.should.contain(Tokens.StringDoubleQuotedVerbatim("hello ", 4, 22)); - tokens.should.contain(Tokens.StringInterpolatedExpression("one", 4, 29)); - tokens.should.contain(Tokens.StringDoubleQuotedVerbatim(" world ", 4, 33)); - tokens.should.contain(Tokens.StringInterpolatedExpression("two", 4, 41)); - tokens.should.contain(Tokens.StringDoubleQuotedVerbatim("!", 4, 45)); - tokens.should.contain(Tokens.StringEnd('"', 4, 46)); - }); - - - it("verbatim multi-line", function() { - -const input = ` -public class Tester -{ - string test = $@"hello {one} - world {two}!"; -}`; - - let tokens: Token[] = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.StringStart('$@"', 4, 19)); - tokens.should.contain(Tokens.StringDoubleQuotedVerbatim("hello ", 4, 22)); - tokens.should.contain(Tokens.StringInterpolatedExpression("one", 4, 29)); - tokens.should.contain(Tokens.StringDoubleQuotedVerbatim(" world ", 5, 1)); - 
tokens.should.contain(Tokens.StringInterpolatedExpression("two", 5, 12)); - tokens.should.contain(Tokens.StringDoubleQuotedVerbatim("!", 5, 16)); - tokens.should.contain(Tokens.StringEnd('"', 5, 17)); - }); - - it("verbatim multi-line without expressions", function() { - -const input = ` -public class Tester -{ - string test = $@"hello - world!"; -}`; - - let tokens: Token[] = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.StringStart('$@"', 4, 19)); - tokens.should.contain(Tokens.StringDoubleQuotedVerbatim("hello", 4, 22)); - tokens.should.contain(Tokens.StringDoubleQuotedVerbatim(" world!", 5, 1)); - tokens.should.contain(Tokens.StringEnd('"', 5, 11)); - }); - }); -}); - - diff --git a/test/syntaxes/utils/tokenizer.ts b/test/syntaxes/utils/tokenizer.ts deleted file mode 100644 index e49d2e3bfd..0000000000 --- a/test/syntaxes/utils/tokenizer.ts +++ /dev/null @@ -1,108 +0,0 @@ -import {ITokenizeLineResult, Registry, IGrammar, StackElement} from 'vscode-textmate'; - -export class Tokenizer -{ - private _grammar : IGrammar; - - constructor(grammarFilePath: string) { - this._grammar = new Registry().loadGrammarFromPathSync(grammarFilePath); - } - - public tokenize(input: string): Token[] { - let tokens: Token[] = []; - - // ensure consistent line-endings irrelevant of OS - input = input.replace("\r\n","\n"); - - let previousStack : StackElement = null; - - const lines: string[] = input.split("\n"); - - for (let lineIndex = 0; lineIndex < lines.length; lineIndex++) { - const line = lines[lineIndex]; - - let result: ITokenizeLineResult = this._grammar.tokenizeLine(line, previousStack); - previousStack = result.ruleStack; - - for (const token of result.tokens) { - const text = line.substring(token.startIndex, token.endIndex); - const type : string = token.scopes[token.scopes.length - 1]; - tokens.push(new Token(text, type, lineIndex+1, token.startIndex + 1)); - } - } - - return tokens; - } -} - -export class Token { - constructor(text: string, type: 
string, line?: number, column?: number) { - this.text = text; - this.type = type; - this.column = column; - this.line = line; - } - - public text: string; - public type: string; - public line: number; - public column: number; -} - -export namespace Tokens { - - function createToken(text: string, type: string, line?: number, column?: number) : Token { - return new Token(text, type, line, column); - } - - export const NamespaceKeyword = (text: string, line?: number, column?: number) => - createToken(text, "keyword.other.namespace.cs", line, column); - - export const NamespaceIdentifier = (text: string, line?: number, column?: number) => - createToken(text, "entity.name.type.namespace.cs", line, column); - - export const UsingKeyword = (text: string, line?: number, column?: number) => - createToken(text, "keyword.other.using.cs", line, column); - - export const ClassKeyword = (text: string, line?: number, column?: number) => - createToken(text, "storage.modifier.cs", line, column); - - export const ClassIdentifier = (text: string, line?: number, column?: number) => - createToken(text, "storage.type.cs", line, column); - - export const StorageModifierKeyword = (text: string, line?: number, column?: number) => - createToken(text, "storage.modifier.cs", line, column); - - export const Type = (text: string, line?: number, column?: number) => - createToken(text, "storage.type.cs", line, column); - - export const Keyword = (text: string, line?: number, column?: number) => - createToken(text, "keyword.other.cs", line, column); - - export const FieldIdentifier = (text: string, line?: number, column?: number) => - createToken(text, "entity.name.variable.cs", line, column); - - export const StringDoubleQuoted = (text: string, line?: number, column?: number) => - createToken(text, "string.quoted.double.cs", line, column); - - export const StringDoubleQuotedVerbatim = (text: string, line?: number, column?: number) => - createToken(text, "string.quoted.double.literal.cs", line, 
column); - - export const EventIdentifier = (text: string, line?: number, column?: number) => - createToken(text, "entity.name.variable.cs", line, column); - - export const LanguageConstant = (text: string, line?: number, column?: number) => - createToken(text, "constant.language.cs", line, column); - - export const PropertyIdentifier = (text: string, line?: number, column?: number) => - createToken(text, "entity.name.function.cs", line, column); - - export const StringInterpolatedExpression = (text: string, line?: number, column?: number) => - createToken(text, "meta.interpolated.expression.cs", line, column); - - export const StringStart = (text: string, line?: number, column?: number) => - createToken(text, "punctuation.definition.string.begin.cs", line, column); - - export const StringEnd = (text: string, line?: number, column?: number) => - createToken(text, "punctuation.definition.string.end.cs", line, column); -} diff --git a/test/syntaxes/utils/tokenizerUtil.ts b/test/syntaxes/utils/tokenizerUtil.ts deleted file mode 100644 index 2bb010b035..0000000000 --- a/test/syntaxes/utils/tokenizerUtil.ts +++ /dev/null @@ -1,10 +0,0 @@ -import { Tokenizer, Token } from './tokenizer'; - -export class TokenizerUtil -{ - private static _tokenizer: Tokenizer = new Tokenizer("syntaxes/csharp.json"); - - public static tokenize(input: string): Token[] { - return TokenizerUtil._tokenizer.tokenize(input); - } -} From dddb22ebb47944b15c7bbd4e89bbc7b2e95d7ba9 Mon Sep 17 00:00:00 2001 From: Ivan Zlatev Date: Sat, 17 Dec 2016 19:08:52 +0000 Subject: [PATCH 003/192] Added "npm run update-grammar" which updates the "csharp-textmate-grammar" package to the latest stable and replaces the grammar under "syntaxes". 
--- gulpfile.js | 5 +++++ package.json | 6 ++++-- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/gulpfile.js b/gulpfile.js index 0eb960b7bf..0765238120 100644 --- a/gulpfile.js +++ b/gulpfile.js @@ -183,3 +183,8 @@ gulp.task('tslint', () => { emitError: false })) }); + +gulp.task('updateGrammar', () => { + gulp.src("node_modules/csharp-textmate-grammar/grammars/csharp.json") + .pipe(gulp.dest("syntaxes/")) +}); \ No newline at end of file diff --git a/package.json b/package.json index 6f1030ef59..1c6a0aae91 100644 --- a/package.json +++ b/package.json @@ -22,7 +22,8 @@ "compile": "node ./node_modules/vscode/bin/compile -p ./ && gulp tslint", "watch": "node ./node_modules/vscode/bin/compile -watch -p ./", "test": "mocha --timeout 15000 -u tdd ./out/test/*.tests.js ./out/test/**/*.tests.js", - "postinstall": "node ./node_modules/vscode/bin/install" + "postinstall": "node ./node_modules/vscode/bin/install", + "update-grammar": "npm update csharp-textmate-grammar && gulp updateGrammar" }, "dependencies": { "fs-extra-promise": "^0.3.1", @@ -38,6 +39,7 @@ "yauzl": "^2.5.0" }, "devDependencies": { + "csharp-textmate-grammar": "*", "del": "^2.0.2", "gulp": "^3.9.1", "gulp-mocha": "^2.1.3", @@ -1125,4 +1127,4 @@ } ] } -} \ No newline at end of file +} From 113017e77a29f4346775d03de0997f191af4459e Mon Sep 17 00:00:00 2001 From: Ivan Zlatev Date: Sat, 17 Dec 2016 19:05:46 +0000 Subject: [PATCH 004/192] Pull in v0.2.0 of the C# grammar which includes a number of fixes and features such as static usings. 
--- syntaxes/csharp.json | 61 +++++++++++++++++++++++++++++++++++++------- 1 file changed, 52 insertions(+), 9 deletions(-) diff --git a/syntaxes/csharp.json b/syntaxes/csharp.json index 4126156b04..7a6a0e9a11 100644 --- a/syntaxes/csharp.json +++ b/syntaxes/csharp.json @@ -25,6 +25,37 @@ "name": "keyword.other.using.cs" } }, + "patterns": [ + { + "match": "\\s*static\\b", + "name": "keyword.other.static.cs" + }, + { + "include": "#comments" + }, + { + "begin": "(\\w+)\\s*=\\s*", + "beginCaptures": { + "1": { + "name": "entity.name.alias.namespace.cs" + } + }, + "patterns": [ + { + "include": "#comments" + }, + { + "match": "[\\w.]+", + "name": "entity.name.type.namespace.cs" + } + ], + "end": "\\s+" + }, + { + "match": "[\\w.]+", + "name": "entity.name.type.namespace.cs" + } + ], "end": "\\s*(?:$|;)" }, "namespace": { @@ -75,7 +106,7 @@ "patterns": [ { "begin": "(?=(?:(?:(?:private|public|volatile|internal|protected|static|readonly|const|event)\\s*)*)(?:[\\w\\s,<>\\[\\]]+?)(?:[\\w]+)\\s*(?:;|=|=>))", - "end": "(?=;)", + "end": "(?=;)|$", "patterns": [ { "match": "^\\s*((?:(?:private|public|volatile|internal|protected|static|readonly|const|event)\\s*)*)\\s*(.+?)\\s*([\\w]+)\\s*(?=;|=)", @@ -101,7 +132,7 @@ }, { "begin": "(?==>?)", - "end": "(?=;|$)", + "end": "(?=;)|$", "patterns": [ { "include": "#code" @@ -123,7 +154,7 @@ } }, { - "match": "^\\s*\\b(?!var|return|yield|throw)([\\w<>*?\\[\\]]+)\\s+([\\w]+)\\s*(?=(=(?!=)|;))", + "match": "^\\s*\\b(?!var|return|yield|throw|private|public|internal|protected)([\\w<>*?\\[\\]]+)\\s+([\\w]+)\\s*(?=(=(?!=)|;))", "captures": { "1": { "name": "storage.type.variable.cs" @@ -289,7 +320,7 @@ "type-body": { "patterns": [ { - "include": "type-declaration" + "include": "#type-declaration" }, { "include": "#field-declaration" @@ -548,7 +579,7 @@ "name": "constant.language.cs" }, { - "match": "\\b((0(x|X)[0-9a-fA-F]*)|(([0-9]+\\.?[0-9]*)|(\\.[0-9]+))((e|E)(\\+|-)?[0-9]+)?)(L|l|UL|ul|u|U|F|f|ll|LL|ull|ULL)?\\b", + "match": 
"\\b((0(x|X)[0-9a-fA-F]*)|(([0-9]+\\.?[0-9]*)|(\\.[0-9]+))((e|E)(\\+|-)?[0-9]+)?)(?i:f|d|m|u|l|ul|lu)?\\b", "name": "constant.numeric.cs" }, { @@ -589,6 +620,9 @@ }, { "include": "#builtinTypes" + }, + { + "include": "#comments" } ] }, @@ -633,7 +667,7 @@ "method": { "patterns": [ { - "include": "attribute" + "include": "#attribute" }, { "begin": "(?=\\bnew\\s+)(?=[\\w<].*\\s+)(?=[^=]+\\()", @@ -673,6 +707,9 @@ }, { "include": "#constants" + }, + { + "include": "#comments" } ] }, @@ -701,6 +738,9 @@ } ] }, + { + "include": "#comments" + }, { "begin": "=>", "beginCaptures": { @@ -797,11 +837,14 @@ "patterns": [ { "captures": { - "2": { - "name": "entity.name.function.preprocessor.cs" + "1": { + "name": "meta.directive.preprocessor.cs" + }, + "3": { + "name": "entity.name.preprocessor.cs" } }, - "match": "^\\s*#\\s*(if|else|elif|endif|define|undef|warning|error|line|pragma|region|endregion)\\b\\s*(.*?)(?=$|\\/\\/)", + "match": "^\\s*(#\\s*(if|else|elif|endif|define|undef|warning|error|line|pragma|region|endregion))\\b\\s*(.*?)(?=$|\\/\\/)", "name": "meta.preprocessor.cs" } ] From d04465dec732021d3e772405bccb7c27d78e3436 Mon Sep 17 00:00:00 2001 From: filipw Date: Tue, 20 Dec 2016 16:18:05 +0100 Subject: [PATCH 005/192] added auto-closing and surrounding pairs configuration --- csharp.configuration.json | 34 +++++++++++++++++++++++++--------- 1 file changed, 25 insertions(+), 9 deletions(-) diff --git a/csharp.configuration.json b/csharp.configuration.json index 7c3c76c3c4..8707c381f6 100644 --- a/csharp.configuration.json +++ b/csharp.configuration.json @@ -1,11 +1,27 @@ { - "comments": { - "lineComment": "//", - "blockComment": ["/*", "*/"] - }, - "brackets": [ - ["{", "}"], - ["[", "]"], - ["(", ")"] - ] + "comments": { + "lineComment": "//", + "blockComment": ["/*", "*/"] + }, + "brackets": [ + ["{", "}"], + ["[", "]"], + ["(", ")"] + ], + "autoClosingPairs": [ + ["{", "}"], + ["[", "]"], + ["(", ")"], + { "open": "'", "close": "'", "notIn": ["string", "comment"] 
}, + { "open": "\"", "close": "\"", "notIn": ["string", "comment"] }, + { "open": "/*", "close": " */", "notIn": ["string"] } + ], + "surroundingPairs": [ + ["{", "}"], + ["[", "]"], + ["(", ")"], + ["<", ">"], + ["'", "'"], + ["\"", "\""] + ] } \ No newline at end of file From 6e6e7c61a8bd4332b38e54ccce1adf5fdb30667b Mon Sep 17 00:00:00 2001 From: Thaina Yu Date: Fri, 23 Dec 2016 22:13:34 +0700 Subject: [PATCH 006/192] Update package.json Add jsonValidation with prototyped omnisharp --- package.json | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/package.json b/package.json index fe99c022a6..aa6f8f220f 100644 --- a/package.json +++ b/package.json @@ -394,10 +394,14 @@ } ], "jsonValidation": [ - { - "fileMatch": "project.json", - "url": "http://json.schemastore.org/project" - } + { + "fileMatch": "project.json", + "url": "http://json.schemastore.org/project" + }, + { + "fileMatch": "omnisharp.json", + "url": "http://json.schemastore.org/omnisharp" + } ], "commands": [ { From cae9cfe62c40cb0a4757a002b323cdb9030a8ef9 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Mon, 26 Dec 2016 11:19:28 -0800 Subject: [PATCH 007/192] Start new C# grammar with comments, type names, and using directives --- syntaxes/csharp2.json | 199 ++++++++++++++++++ test/syntaxes/comments.test.syntax.ts | 44 ++++ test/syntaxes/using-directives.test.syntax.ts | 177 ++++++++++++++++ test/syntaxes/utils/tokenizer.ts | 77 ++++++- test/syntaxes/utils/tokenizerUtil.ts | 5 + 5 files changed, 493 insertions(+), 9 deletions(-) create mode 100644 syntaxes/csharp2.json create mode 100644 test/syntaxes/comments.test.syntax.ts create mode 100644 test/syntaxes/using-directives.test.syntax.ts diff --git a/syntaxes/csharp2.json b/syntaxes/csharp2.json new file mode 100644 index 0000000000..e8d938b0c6 --- /dev/null +++ b/syntaxes/csharp2.json @@ -0,0 +1,199 @@ +{ + "name": "C#", + "scopeName": "source.cs", + "fileTypes": [ + "cs" + ], + "patterns": [ + { + "include": 
"#comment" + }, + { + "include": "#directives" + } + ], + "repository": { + "directives": { + "patterns": [ + { + "include": "#using-directive" + }, + { + "include": "#punctuation-semicolon" + } + ] + }, + "using-directive": { + "patterns": [ + { + "begin": "^\\s*(using)\\b\\s*(static)*", + "beginCaptures": { + "1": { + "name": "keyword.other.using.cs" + }, + "2": { + "name": "keyword.other.static.cs" + } + }, + "end": "(?=;)", + "patterns": [ + { + "include": "#type" + }, + { + "include": "#operator-assignment" + } + ] + } + ] + }, + "type": { + "name": "meta.type.cs", + "patterns": [ + { + "include": "#comment" + }, + { + "include": "#type-builtin" + }, + { + "include": "#type-name" + }, + { + "include": "#type-parameters" + }, + { + "include": "#type-array-suffix" + } + ] + }, + "type-builtin": { + "patterns": [ + { + "match": "\\b(bool|byte|char|decimal|double|float|int|long|object|sbyte|short|string|uint|ulong|ushort)\\b", + "name": "storage.type.cs" + } + ] + }, + "type-name": { + "patterns": [ + { + "match": "([_$[:alpha:]][_$[:alnum:]]*)\\s*(\\.)", + "captures": { + "1": { + "name": "storage.type.cs" + }, + "2": { + "name": "punctuation.accessor.cs" + } + } + }, + { + "name": "storage.type.cs", + "match": "[_$[:alpha:]][_$[:alnum:]]*" + } + ] + }, + "type-parameters": { + "name": "meta.type.parameters.cs", + "begin": "(<)", + "beginCaptures": { + "1": { + "name": "punctuation.definition.typeparameters.begin.cs" + } + }, + "end": "(>)", + "endCaptures": { + "1": { + "name": "punctuation.definition.typeparameters.end.cs" + } + }, + "patterns": [ + { + "include": "#comment" + }, + { + "include": "#type" + }, + { + "include": "#punctuation-comma" + } + ] + }, + "type-array-suffix": { + "begin": "(\\[)", + "beginCaptures": { + "1": { + "name": "punctuation.squarebracket.open.cs" + } + }, + "end": "(\\])", + "endCaptures": { + "1": { + "name": "punctuation.squarebracket.close.cs" + } + }, + "patterns": [ + { + "include": "#punctuation-comma" + } + ] + }, + 
"operator-assignment": { + "name": "keyword.operator.assignment.cs", + "match": "\\=" + }, + "punctuation-comma": { + "name": "punctuation.separator.comma.cs", + "match": "," + }, + "punctuation-semicolon": { + "name": "punctuation.terminator.statement.cs", + "match": ";" + }, + "punctuation-accessor": { + "name": "punctuation.accessor.cs", + "match": "\\." + }, + "comment": { + "patterns": [ + { + "name": "comment.block.cs", + "begin": "/\\*", + "beginCaptures": { + "0": { + "name": "punctuation.definition.comment.cs" + } + }, + "end": "\\*/", + "endCaptures": { + "0": { + "name": "punctuation.definition.comment.cs" + } + } + }, + { + "begin": "(^[ \\t]+)?(?=//)", + "beginCaptures": { + "1": { + "name": "punctuation.whitespace.comment.leading.cs" + } + }, + "end": "(?=$)", + "patterns": [ + { + "name": "comment.line.double-slash.cs", + "begin": "//", + "beginCaptures": { + "0": { + "name": "punctuation.definition.comment.cs" + } + }, + "end": "(?=$)" + } + ] + } + ] + } + } +} \ No newline at end of file diff --git a/test/syntaxes/comments.test.syntax.ts b/test/syntaxes/comments.test.syntax.ts new file mode 100644 index 0000000000..d0df776e46 --- /dev/null +++ b/test/syntaxes/comments.test.syntax.ts @@ -0,0 +1,44 @@ +import { should } from 'chai'; +import { Tokens, Token } from './utils/tokenizer'; +import { TokenizerUtil } from './utils/tokenizerUtil'; + +describe("Grammar", () => { + before(() => should()); + + describe("Comments", () => { + it("single-line comment", () => { + + const input = ` +// foo`; + + let tokens: Token[] = TokenizerUtil.tokenize2(input); + + tokens.should.contain(Tokens.Comment.SingleLine.Start(2, 1)); + tokens.should.contain(Tokens.Comment.SingleLine.Text(" foo", 2, 3)); + }); + + it("single-line comment after whitespace", () => { + + const input = ` + // foo`; + + let tokens: Token[] = TokenizerUtil.tokenize2(input); + + tokens.should.contain(Tokens.Comment.LeadingWhitespace(" ", 2, 1)); + 
tokens.should.contain(Tokens.Comment.SingleLine.Start(2, 5)); + tokens.should.contain(Tokens.Comment.SingleLine.Text(" foo", 2, 7)); + }); + + it("multi-line comment", () => { + + const input = ` +/* foo */`; + + let tokens: Token[] = TokenizerUtil.tokenize2(input); + + tokens.should.contain(Tokens.Comment.MultiLine.Start(2, 1)); + tokens.should.contain(Tokens.Comment.MultiLine.Text(" foo ", 2, 3)); + tokens.should.contain(Tokens.Comment.MultiLine.End(2, 8)); + }); + }); +}); \ No newline at end of file diff --git a/test/syntaxes/using-directives.test.syntax.ts b/test/syntaxes/using-directives.test.syntax.ts new file mode 100644 index 0000000000..4e69fdb788 --- /dev/null +++ b/test/syntaxes/using-directives.test.syntax.ts @@ -0,0 +1,177 @@ +import { should } from 'chai'; +import { Tokens, Token } from './utils/tokenizer'; +import { TokenizerUtil } from './utils/tokenizerUtil'; + +describe("Grammar", () => { + before(() => should()); + + describe("Using directives", () => { + it("using namespace", () => { + + const input = ` +using System;`; + + let tokens: Token[] = TokenizerUtil.tokenize2(input); + + tokens.should.contain(Tokens.Keywords.Using(2, 1)); + tokens.should.contain(Tokens.Type("System", 2, 7)); + tokens.should.contain(Tokens.Puncuation.Semicolon(2, 13)); + }); + + it("using static type", () => { + + const input = ` +using static System.Console;`; + + let tokens: Token[] = TokenizerUtil.tokenize2(input); + + tokens.should.contain(Tokens.Keywords.Using(2, 1)); + tokens.should.contain(Tokens.Keywords.Static(2, 7)); + tokens.should.contain(Tokens.Type("System", 2, 14)); + tokens.should.contain(Tokens.Puncuation.Accessor(2, 20)); + tokens.should.contain(Tokens.Type("Console", 2, 21)); + tokens.should.contain(Tokens.Puncuation.Semicolon(2, 28)); + }); + + it("namespace alias", () => { + + const input = ` +using S = System;`; + + let tokens: Token[] = TokenizerUtil.tokenize2(input); + + tokens.should.contain(Tokens.Keywords.Using(2, 1)); + 
tokens.should.contain(Tokens.Type("S", 2, 7)); + tokens.should.contain(Tokens.Operators.Assignment(2, 9)); + tokens.should.contain(Tokens.Type("System", 2, 11)); + tokens.should.contain(Tokens.Puncuation.Semicolon(2, 17)); + }); + + it("type alias", () => { + + const input = ` +using C = System.Console;`; + + let tokens: Token[] = TokenizerUtil.tokenize2(input); + + tokens.should.contain(Tokens.Keywords.Using(2, 1)); + tokens.should.contain(Tokens.Type("C", 2, 7)); + tokens.should.contain(Tokens.Operators.Assignment(2, 9)); + tokens.should.contain(Tokens.Type("System", 2, 11)); + tokens.should.contain(Tokens.Puncuation.Accessor(2, 17)); + tokens.should.contain(Tokens.Type("Console", 2, 18)); + tokens.should.contain(Tokens.Puncuation.Semicolon(2, 25)); + }); + + it("type alias with generic type", () => { + + const input = ` +using IntList = System.Collections.Generic.List;`; + + let tokens: Token[] = TokenizerUtil.tokenize2(input); + + tokens.should.contain(Tokens.Keywords.Using(2, 1)); + tokens.should.contain(Tokens.Type("IntList", 2, 7)); + tokens.should.contain(Tokens.Operators.Assignment(2, 15)); + tokens.should.contain(Tokens.Type("System", 2, 17)); + tokens.should.contain(Tokens.Puncuation.Accessor(2, 23)); + tokens.should.contain(Tokens.Type("Collections", 2, 24)); + tokens.should.contain(Tokens.Puncuation.Accessor(2, 35)); + tokens.should.contain(Tokens.Type("Generic", 2, 36)); + tokens.should.contain(Tokens.Puncuation.Accessor(2, 43)); + tokens.should.contain(Tokens.Type("List", 2, 44)); + tokens.should.contain(Tokens.Puncuation.TypeParametersBegin(2, 48)); + tokens.should.contain(Tokens.Type("System", 2, 49)); + tokens.should.contain(Tokens.Puncuation.Accessor(2, 55)); + tokens.should.contain(Tokens.Type("Int32", 2, 56)); + tokens.should.contain(Tokens.Puncuation.TypeParametersEnd(2, 61)); + tokens.should.contain(Tokens.Puncuation.Semicolon(2, 62)); + }); + + it("type alias with nested generic types", () => { + + const input = ` +using X = 
System.Collections.Generic.Dictionary>;`; + + let tokens: Token[] = TokenizerUtil.tokenize2(input); + + tokens.should.contain(Tokens.Keywords.Using(2, 1)); + tokens.should.contain(Tokens.Type("X", 2, 7)); + tokens.should.contain(Tokens.Operators.Assignment(2, 9)); + tokens.should.contain(Tokens.Type("System", 2, 11)); + tokens.should.contain(Tokens.Puncuation.Accessor(2, 17)); + tokens.should.contain(Tokens.Type("Collections", 2, 18)); + tokens.should.contain(Tokens.Puncuation.Accessor(2, 29)); + tokens.should.contain(Tokens.Type("Generic", 2, 30)); + tokens.should.contain(Tokens.Puncuation.Accessor(2, 37)); + tokens.should.contain(Tokens.Type("Dictionary", 2, 38)); + tokens.should.contain(Tokens.Puncuation.TypeParametersBegin(2, 48)); + tokens.should.contain(Tokens.Type("System", 2, 49)); + tokens.should.contain(Tokens.Puncuation.Accessor(2, 55)); + tokens.should.contain(Tokens.Type("Int32", 2, 56)); + tokens.should.contain(Tokens.Puncuation.Comma(2, 61)); + tokens.should.contain(Tokens.Type("System", 2, 63)); + tokens.should.contain(Tokens.Puncuation.Accessor(2, 69)); + tokens.should.contain(Tokens.Type("Collections", 2, 70)); + tokens.should.contain(Tokens.Puncuation.Accessor(2, 81)); + tokens.should.contain(Tokens.Type("Generic", 2, 82)); + tokens.should.contain(Tokens.Puncuation.Accessor(2, 89)); + tokens.should.contain(Tokens.Type("List", 2, 90)); + tokens.should.contain(Tokens.Puncuation.TypeParametersBegin(2, 94)); + tokens.should.contain(Tokens.Type("System", 2, 95)); + tokens.should.contain(Tokens.Puncuation.Accessor(2, 101)); + tokens.should.contain(Tokens.Type("String", 2, 102)); + tokens.should.contain(Tokens.Puncuation.TypeParametersEnd(2, 108)); + tokens.should.contain(Tokens.Puncuation.TypeParametersEnd(2, 109)); + tokens.should.contain(Tokens.Puncuation.Semicolon(2, 110)); + }); + + it("type alias with nested generic types and comments interspersed", () => { + + const input = ` +using/**/X/**/=/**/Dictionary/**//**/>/**/;//end`; + + let tokens: 
Token[] = TokenizerUtil.tokenize2(input); + + tokens.should.contain(Tokens.Keywords.Using(2, 1)); + tokens.should.contain(Tokens.Comment.MultiLine.Start(2, 6)); + tokens.should.contain(Tokens.Comment.MultiLine.End(2, 8)); + tokens.should.contain(Tokens.Type("X", 2, 10)); + tokens.should.contain(Tokens.Comment.MultiLine.Start(2, 11)); + tokens.should.contain(Tokens.Comment.MultiLine.End(2, 13)); + tokens.should.contain(Tokens.Operators.Assignment(2, 15)); + tokens.should.contain(Tokens.Comment.MultiLine.Start(2, 16)); + tokens.should.contain(Tokens.Comment.MultiLine.End(2, 18)); + tokens.should.contain(Tokens.Type("Dictionary", 2, 20)); + tokens.should.contain(Tokens.Comment.MultiLine.Start(2, 30)); + tokens.should.contain(Tokens.Comment.MultiLine.End(2, 32)); + tokens.should.contain(Tokens.Puncuation.TypeParametersBegin(2, 34)); + tokens.should.contain(Tokens.Comment.MultiLine.Start(2, 35)); + tokens.should.contain(Tokens.Comment.MultiLine.End(2, 37)); + tokens.should.contain(Tokens.Type("int", 2, 39)); + tokens.should.contain(Tokens.Comment.MultiLine.Start(2, 42)); + tokens.should.contain(Tokens.Comment.MultiLine.End(2, 44)); + tokens.should.contain(Tokens.Puncuation.Comma(2, 46)); + tokens.should.contain(Tokens.Comment.MultiLine.Start(2, 47)); + tokens.should.contain(Tokens.Comment.MultiLine.End(2, 49)); + tokens.should.contain(Tokens.Type("List", 2, 51)); + tokens.should.contain(Tokens.Comment.MultiLine.Start(2, 55)); + tokens.should.contain(Tokens.Comment.MultiLine.End(2, 57)); + tokens.should.contain(Tokens.Puncuation.TypeParametersBegin(2, 59)); + tokens.should.contain(Tokens.Comment.MultiLine.Start(2, 60)); + tokens.should.contain(Tokens.Comment.MultiLine.End(2, 62)); + tokens.should.contain(Tokens.Type("string", 2, 64)); + tokens.should.contain(Tokens.Comment.MultiLine.Start(2, 70)); + tokens.should.contain(Tokens.Comment.MultiLine.End(2, 72)); + tokens.should.contain(Tokens.Puncuation.TypeParametersEnd(2, 74)); + 
tokens.should.contain(Tokens.Comment.MultiLine.Start(2, 75)); + tokens.should.contain(Tokens.Comment.MultiLine.End(2, 77)); + tokens.should.contain(Tokens.Puncuation.TypeParametersEnd(2, 79)); + tokens.should.contain(Tokens.Comment.MultiLine.Start(2, 80)); + tokens.should.contain(Tokens.Comment.MultiLine.End(2, 82)); + tokens.should.contain(Tokens.Puncuation.Semicolon(2, 84)); + tokens.should.contain(Tokens.Comment.SingleLine.Start(2, 85)); + tokens.should.contain(Tokens.Comment.SingleLine.Text("end", 2, 87)); + }); + }); +}); \ No newline at end of file diff --git a/test/syntaxes/utils/tokenizer.ts b/test/syntaxes/utils/tokenizer.ts index e49d2e3bfd..03572b3bf5 100644 --- a/test/syntaxes/utils/tokenizer.ts +++ b/test/syntaxes/utils/tokenizer.ts @@ -1,8 +1,7 @@ -import {ITokenizeLineResult, Registry, IGrammar, StackElement} from 'vscode-textmate'; +import { ITokenizeLineResult, Registry, IGrammar, StackElement } from 'vscode-textmate'; -export class Tokenizer -{ - private _grammar : IGrammar; +export class Tokenizer { + private _grammar: IGrammar; constructor(grammarFilePath: string) { this._grammar = new Registry().loadGrammarFromPathSync(grammarFilePath); @@ -12,9 +11,9 @@ export class Tokenizer let tokens: Token[] = []; // ensure consistent line-endings irrelevant of OS - input = input.replace("\r\n","\n"); + input = input.replace("\r\n", "\n"); - let previousStack : StackElement = null; + let previousStack: StackElement = null; const lines: string[] = input.split("\n"); @@ -26,8 +25,8 @@ export class Tokenizer for (const token of result.tokens) { const text = line.substring(token.startIndex, token.endIndex); - const type : string = token.scopes[token.scopes.length - 1]; - tokens.push(new Token(text, type, lineIndex+1, token.startIndex + 1)); + const type: string = token.scopes[token.scopes.length - 1]; + tokens.push(new Token(text, type, lineIndex + 1, token.startIndex + 1)); } } @@ -51,10 +50,70 @@ export class Token { export namespace Tokens { - function 
createToken(text: string, type: string, line?: number, column?: number) : Token { + function createToken(text: string, type: string, line?: number, column?: number): Token { return new Token(text, type, line, column); } + export namespace Comment { + export const LeadingWhitespace = (text: string, line?: number, column?: number) => + createToken(text, "punctuation.whitespace.comment.leading.cs", line, column); + + export namespace MultiLine { + export const End = (line?: number, column?: number) => + createToken("*/", "punctuation.definition.comment.cs", line, column); + + export const Start = (line?: number, column?: number) => + createToken("/*", "punctuation.definition.comment.cs", line, column); + + export const Text = (text: string, line?: number, column?: number) => + createToken(text, "comment.block.cs", line, column); + } + + export namespace SingleLine { + export const Start = (line?: number, column?: number) => + createToken("//", "punctuation.definition.comment.cs", line, column); + + export const Text = (text: string, line?: number, column?: number) => + createToken(text, "comment.line.double-slash.cs", line, column); + } + } + + export namespace Keywords { + export const Static = (line?: number, column?: number) => + createToken("static", "keyword.other.static.cs", line, column); + + export const Using = (line?: number, column?: number) => + createToken("using", "keyword.other.using.cs", line, column); + } + + export namespace Operators { + export const Assignment = (line?: number, column?: number) => + createToken("=", "keyword.operator.assignment.cs", line, column); + } + + export namespace Puncuation { + export const Accessor = (line?: number, column?: number) => + createToken(".", "punctuation.accessor.cs", line, column); + + export const Comma = (line?: number, column?: number) => + createToken(",", "punctuation.separator.comma.cs", line, column); + + export const Semicolon = (line?: number, column?: number) => + createToken(";", 
"punctuation.terminator.statement.cs", line, column); + + export const SquareBracketClose = (line?: number, column?: number) => + createToken("[", "punctuation.squarebracket.close.cs"); + + export const SquareBracketOpen = (line?: number, column?: number) => + createToken("[", "punctuation.squarebracket.open.cs"); + + export const TypeParametersBegin = (line?: number, column?: number) => + createToken("<", "punctuation.definition.typeparameters.begin.cs", line, column); + + export const TypeParametersEnd = (line?: number, column?: number) => + createToken(">", "punctuation.definition.typeparameters.end.cs", line, column); + } + export const NamespaceKeyword = (text: string, line?: number, column?: number) => createToken(text, "keyword.other.namespace.cs", line, column); diff --git a/test/syntaxes/utils/tokenizerUtil.ts b/test/syntaxes/utils/tokenizerUtil.ts index 2bb010b035..a3ab47f7ec 100644 --- a/test/syntaxes/utils/tokenizerUtil.ts +++ b/test/syntaxes/utils/tokenizerUtil.ts @@ -3,8 +3,13 @@ import { Tokenizer, Token } from './tokenizer'; export class TokenizerUtil { private static _tokenizer: Tokenizer = new Tokenizer("syntaxes/csharp.json"); + private static _tokenizer2: Tokenizer = new Tokenizer("syntaxes/csharp2.json"); public static tokenize(input: string): Token[] { return TokenizerUtil._tokenizer.tokenize(input); } + + public static tokenize2(input: string): Token[] { + return TokenizerUtil._tokenizer2.tokenize(input); + } } From 6c65c7ec59946f8c2df26f79b41296bea66ecddb Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Mon, 26 Dec 2016 11:32:38 -0800 Subject: [PATCH 008/192] Add extern aliases --- syntaxes/csharp2.json | 20 ++++++++++++++- test/syntaxes/extern-aliases.test.syntax.ts | 27 +++++++++++++++++++++ test/syntaxes/utils/tokenizer.ts | 11 +++++++++ 3 files changed, 57 insertions(+), 1 deletion(-) create mode 100644 test/syntaxes/extern-aliases.test.syntax.ts diff --git a/syntaxes/csharp2.json b/syntaxes/csharp2.json index e8d938b0c6..9ca27c5dbf 
100644 --- a/syntaxes/csharp2.json +++ b/syntaxes/csharp2.json @@ -15,6 +15,9 @@ "repository": { "directives": { "patterns": [ + { + "include": "#extern-alias-directive" + }, { "include": "#using-directive" }, @@ -23,10 +26,25 @@ } ] }, + "extern-alias-directive": { + "begin": "\\s*(extern)\\b\\s*(alias)\\b\\s*([_$[:alpha:]][_$[:alnum:]]*)", + "beginCaptures": { + "1": { + "name": "keyword.other.extern.cs" + }, + "2": { + "name": "keyword.other.alias.cs" + }, + "3": { + "name": "variable.other.alias.cs" + } + }, + "end": "(?=;)" + }, "using-directive": { "patterns": [ { - "begin": "^\\s*(using)\\b\\s*(static)*", + "begin": "\\s*(using)\\b\\s*(static)*", "beginCaptures": { "1": { "name": "keyword.other.using.cs" diff --git a/test/syntaxes/extern-aliases.test.syntax.ts b/test/syntaxes/extern-aliases.test.syntax.ts new file mode 100644 index 0000000000..92fe723904 --- /dev/null +++ b/test/syntaxes/extern-aliases.test.syntax.ts @@ -0,0 +1,27 @@ +import { should } from 'chai'; +import { Tokens, Token } from './utils/tokenizer'; +import { TokenizerUtil } from './utils/tokenizerUtil'; + +describe("Grammar", () => { + before(() => should()); + + describe("Extern aliases", () => { + it("simple", () => { + + const input = ` +extern alias X; +extern alias Y;`; + + let tokens: Token[] = TokenizerUtil.tokenize2(input); + + tokens.should.contain(Tokens.Keywords.Extern(2, 1)); + tokens.should.contain(Tokens.Keywords.Alias(2, 8)); + tokens.should.contain(Tokens.Variables.Alias("X", 2, 14)); + tokens.should.contain(Tokens.Puncuation.Semicolon(2, 15)); + tokens.should.contain(Tokens.Keywords.Extern(3, 1)); + tokens.should.contain(Tokens.Keywords.Alias(3, 8)); + tokens.should.contain(Tokens.Variables.Alias("Y", 3, 14)); + tokens.should.contain(Tokens.Puncuation.Semicolon(3, 15)); + }); + }); +}); \ No newline at end of file diff --git a/test/syntaxes/utils/tokenizer.ts b/test/syntaxes/utils/tokenizer.ts index 03572b3bf5..c6ab6e0606 100644 --- a/test/syntaxes/utils/tokenizer.ts +++ 
b/test/syntaxes/utils/tokenizer.ts @@ -79,6 +79,12 @@ export namespace Tokens { } export namespace Keywords { + export const Alias = (line?: number, column?: number) => + createToken("alias", "keyword.other.alias.cs", line, column); + + export const Extern = (line?: number, column?: number) => + createToken("extern", "keyword.other.extern.cs", line, column); + export const Static = (line?: number, column?: number) => createToken("static", "keyword.other.static.cs", line, column); @@ -114,6 +120,11 @@ export namespace Tokens { createToken(">", "punctuation.definition.typeparameters.end.cs", line, column); } + export namespace Variables { + export const Alias = (text: string, line?: number, column?: number) => + createToken(text, "variable.other.alias.cs", line, column); + } + export const NamespaceKeyword = (text: string, line?: number, column?: number) => createToken(text, "keyword.other.namespace.cs", line, column); From df59ed0c712f77ef6aaa7d6edd8d0231973e185f Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Mon, 26 Dec 2016 13:03:03 -0800 Subject: [PATCH 009/192] Add basic attribute support --- syntaxes/csharp2.json | 129 ++++++++++++++++++ test/syntaxes/attributes.test.syntax.ts | 103 ++++++++++++++ test/syntaxes/boolean-literals.test.syntax.ts | 37 +++++ test/syntaxes/numeric-literals.test.syntax.ts | 25 ++++ test/syntaxes/utils/tokenizer.ts | 45 +++++- 5 files changed, 335 insertions(+), 4 deletions(-) create mode 100644 test/syntaxes/attributes.test.syntax.ts create mode 100644 test/syntaxes/boolean-literals.test.syntax.ts create mode 100644 test/syntaxes/numeric-literals.test.syntax.ts diff --git a/syntaxes/csharp2.json b/syntaxes/csharp2.json index 9ca27c5dbf..688d421b6b 100644 --- a/syntaxes/csharp2.json +++ b/syntaxes/csharp2.json @@ -21,6 +21,9 @@ { "include": "#using-directive" }, + { + "include": "#attribute-section" + }, { "include": "#punctuation-semicolon" } @@ -65,6 +68,132 @@ } ] }, + "attribute-section": { + "begin": 
"(\\[)(assembly|module|field|event|method|param|property|return|type)*(\\:)*", + "beginCaptures": { + "1": { + "name": "punctuation.squarebracket.open.cs" + }, + "2": { + "name": "keyword.other.attribute-specifier.cs" + }, + "3": { + "name": "punctuation.separator.colon.cs" + } + }, + "end": "(\\])", + "endCaptures": { + "1": { + "name": "punctuation.squarebracket.close.cs" + } + }, + "patterns": [ + { + "include": "#comment" + }, + { + "include": "#attribute" + }, + { + "include": "#punctuation-comma" + } + ] + }, + "attribute": { + "patterns": [ + { + "include": "#type-name" + }, + { + "include": "#attribute-arguments" + } + ] + }, + "attribute-arguments": { + "begin": "(\\()", + "beginCaptures": { + "1": { + "name": "punctuation.parenthesis.open.cs" + } + }, + "end": "(\\))", + "endCaptures": { + "1": { + "name": "punctuation.parenthesis.close.cs" + } + }, + "patterns": [ + { + "include": "#expression" + }, + { + "include": "#punctuation-comma" + } + ] + }, + "expression": { + "patterns": [ + { + "include": "#literal" + } + ] + }, + "literal": { + "patterns": [ + { + "include": "#boolean-literal" + }, + { + "include": "#numeric-literal" + } + ] + }, + "boolean-literal": { + "patterns": [ + { + "name": "constant.language.boolean.true.cs", + "match": "(? 
{ + before(() => should()); + + describe("Attributes", () => { + it("global attribute", () => { + + const input = ` +[Foo]`; + + let tokens: Token[] = TokenizerUtil.tokenize2(input); + + tokens.should.contain(Tokens.Puncuation.SquareBracket.Open(2, 1)); + tokens.should.contain(Tokens.Type("Foo", 2, 2)); + tokens.should.contain(Tokens.Puncuation.SquareBracket.Close(2, 5)); + }); + + it("global attribute with specifier", () => { + + const input = ` +[assembly: Foo]`; + + let tokens: Token[] = TokenizerUtil.tokenize2(input); + + tokens.should.contain(Tokens.Puncuation.SquareBracket.Open(2, 1)); + tokens.should.contain(Tokens.Keywords.AttributeSpecifier("assembly", 2, 2)); + tokens.should.contain(Tokens.Puncuation.Colon(2, 10)); + tokens.should.contain(Tokens.Type("Foo", 2, 12)); + tokens.should.contain(Tokens.Puncuation.SquareBracket.Close(2, 15)); + }); + + it("Two global attributes in same section with specifier", () => { + + const input = ` +[module: Foo, Bar]`; + + let tokens: Token[] = TokenizerUtil.tokenize2(input); + + tokens.should.contain(Tokens.Puncuation.SquareBracket.Open(2, 1)); + tokens.should.contain(Tokens.Keywords.AttributeSpecifier("module", 2, 2)); + tokens.should.contain(Tokens.Puncuation.Colon(2, 8)); + tokens.should.contain(Tokens.Type("Foo", 2, 10)); + tokens.should.contain(Tokens.Puncuation.Comma(2, 13)); + tokens.should.contain(Tokens.Type("Bar", 2, 15)); + tokens.should.contain(Tokens.Puncuation.SquareBracket.Close(2, 18)); + }); + + it("Two global attributes in same section with specifier and empty argument lists", () => { + + const input = ` +[module: Foo(), Bar()]`; + + let tokens: Token[] = TokenizerUtil.tokenize2(input); + + tokens.should.contain(Tokens.Puncuation.SquareBracket.Open(2, 1)); + tokens.should.contain(Tokens.Keywords.AttributeSpecifier("module", 2, 2)); + tokens.should.contain(Tokens.Puncuation.Colon(2, 8)); + tokens.should.contain(Tokens.Type("Foo", 2, 10)); + tokens.should.contain(Tokens.Puncuation.Parenthesis.Open(2, 
13)); + tokens.should.contain(Tokens.Puncuation.Parenthesis.Close(2, 14)); + tokens.should.contain(Tokens.Puncuation.Comma(2, 15)); + tokens.should.contain(Tokens.Type("Bar", 2, 17)); + tokens.should.contain(Tokens.Puncuation.Parenthesis.Open(2, 20)); + tokens.should.contain(Tokens.Puncuation.Parenthesis.Close(2, 21)); + tokens.should.contain(Tokens.Puncuation.SquareBracket.Close(2, 22)); + }); + + it("Global attribute with one argument", () => { + + const input = ` +[Foo(true)]`; + + let tokens: Token[] = TokenizerUtil.tokenize2(input); + + tokens.should.contain(Tokens.Puncuation.SquareBracket.Open(2, 1)); + tokens.should.contain(Tokens.Type("Foo", 2, 2)); + tokens.should.contain(Tokens.Puncuation.Parenthesis.Open(2, 5)); + tokens.should.contain(Tokens.Literals.Boolean.True(2, 6)); + tokens.should.contain(Tokens.Puncuation.Parenthesis.Close(2, 10)); + tokens.should.contain(Tokens.Puncuation.SquareBracket.Close(2, 11)); + }); + + it("Global attribute with two argument", () => { + + const input = ` +[Foo(true, 42)]`; + + let tokens: Token[] = TokenizerUtil.tokenize2(input); + + tokens.should.contain(Tokens.Puncuation.SquareBracket.Open(2, 1)); + tokens.should.contain(Tokens.Type("Foo", 2, 2)); + tokens.should.contain(Tokens.Puncuation.Parenthesis.Open(2, 5)); + tokens.should.contain(Tokens.Literals.Boolean.True(2, 6)); + tokens.should.contain(Tokens.Puncuation.Comma(2, 10)); + tokens.should.contain(Tokens.Literals.Numeric.Decimal("42", 2, 12)); + tokens.should.contain(Tokens.Puncuation.Parenthesis.Close(2, 14)); + tokens.should.contain(Tokens.Puncuation.SquareBracket.Close(2, 15)); + }); + }); +}); \ No newline at end of file diff --git a/test/syntaxes/boolean-literals.test.syntax.ts b/test/syntaxes/boolean-literals.test.syntax.ts new file mode 100644 index 0000000000..8ad04a0c56 --- /dev/null +++ b/test/syntaxes/boolean-literals.test.syntax.ts @@ -0,0 +1,37 @@ +import { should } from 'chai'; +import { Tokens, Token } from './utils/tokenizer'; +import { 
TokenizerUtil } from './utils/tokenizerUtil'; + +describe("Grammar", () => { + before(() => should()); + + describe.skip("Literals - boolean", () => { + it("true", () => { + + const input = ` +class C { + method M() { + var x = true; + } +}`; + + let tokens: Token[] = TokenizerUtil.tokenize2(input); + + tokens.should.contain(Tokens.Literals.Boolean.True(4, 17)); + }); + + it("false", () => { + + const input = ` +class C { + method M() { + var x = false; + } +}`; + + let tokens: Token[] = TokenizerUtil.tokenize2(input); + + tokens.should.contain(Tokens.Literals.Boolean.False(4, 17)); + }); + }); +}); \ No newline at end of file diff --git a/test/syntaxes/numeric-literals.test.syntax.ts b/test/syntaxes/numeric-literals.test.syntax.ts new file mode 100644 index 0000000000..71d26ef7e5 --- /dev/null +++ b/test/syntaxes/numeric-literals.test.syntax.ts @@ -0,0 +1,25 @@ +import { should } from 'chai'; +import { Tokens, Token } from './utils/tokenizer'; +import { TokenizerUtil } from './utils/tokenizerUtil'; + +describe("Grammar", () => { + before(() => should()); + + describe.skip("Literals - numeric", () => { + it("decimal zero", () => { + + const input = ` +class C { + method M() { + var x = 0; + } +}`; + + let tokens: Token[] = TokenizerUtil.tokenize2(input); + + tokens.should.contain(Tokens.Puncuation.SquareBracket.Open(2, 1)); + tokens.should.contain(Tokens.Type("Foo", 2, 2)); + tokens.should.contain(Tokens.Puncuation.SquareBracket.Close(2, 5)); + }); + }); +}); \ No newline at end of file diff --git a/test/syntaxes/utils/tokenizer.ts b/test/syntaxes/utils/tokenizer.ts index c6ab6e0606..541992708b 100644 --- a/test/syntaxes/utils/tokenizer.ts +++ b/test/syntaxes/utils/tokenizer.ts @@ -82,6 +82,9 @@ export namespace Tokens { export const Alias = (line?: number, column?: number) => createToken("alias", "keyword.other.alias.cs", line, column); + export const AttributeSpecifier = (text: string, line?: number, column?: number) => + createToken(text, 
"keyword.other.attribute-specifier.cs", line, column); + export const Extern = (line?: number, column?: number) => createToken("extern", "keyword.other.extern.cs", line, column); @@ -92,6 +95,27 @@ export namespace Tokens { createToken("using", "keyword.other.using.cs", line, column); } + export namespace Literals { + export namespace Boolean { + export const False = (line?: number, column?: number) => + createToken("false", "constant.language.boolean.false.cs", line, column); + + export const True = (line?: number, column?: number) => + createToken("true", "constant.language.boolean.true.cs", line, column); + } + + export namespace Numeric { + export const Binary = (text: string, line?: number, column?: number) => + createToken(text, "constant.numeric.binary.cs", line, column); + + export const Decimal = (text: string, line?: number, column?: number) => + createToken(text, "constant.numeric.decimal.cs", line, column); + + export const Hexadecimal = (text: string, line?: number, column?: number) => + createToken(text, "constant.numeric.hex.cs", line, column); + } + } + export namespace Operators { export const Assignment = (line?: number, column?: number) => createToken("=", "keyword.operator.assignment.cs", line, column); @@ -101,17 +125,30 @@ export namespace Tokens { export const Accessor = (line?: number, column?: number) => createToken(".", "punctuation.accessor.cs", line, column); + export const Colon = (line?: number, column?: number) => + createToken(":", "punctuation.separator.colon.cs", line, column); + export const Comma = (line?: number, column?: number) => createToken(",", "punctuation.separator.comma.cs", line, column); + export namespace Parenthesis { + export const Close = (line?: number, column?: number) => + createToken(")", "punctuation.parenthesis.close.cs", line, column); + + export const Open = (line?: number, column?: number) => + createToken("(", "punctuation.parenthesis.open.cs", line, column); + } + export const Semicolon = (line?: number, 
column?: number) => createToken(";", "punctuation.terminator.statement.cs", line, column); - export const SquareBracketClose = (line?: number, column?: number) => - createToken("[", "punctuation.squarebracket.close.cs"); + export namespace SquareBracket { + export const Close = (line?: number, column?: number) => + createToken("]", "punctuation.squarebracket.close.cs", line, column); - export const SquareBracketOpen = (line?: number, column?: number) => - createToken("[", "punctuation.squarebracket.open.cs"); + export const Open = (line?: number, column?: number) => + createToken("[", "punctuation.squarebracket.open.cs", line, column); + } export const TypeParametersBegin = (line?: number, column?: number) => createToken("<", "punctuation.definition.typeparameters.begin.cs", line, column); From 3f5b10b8d8b8716eee01ef1d022be0cd84c7a40e Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Mon, 26 Dec 2016 13:17:49 -0800 Subject: [PATCH 010/192] String literals --- syntaxes/csharp2.json | 36 +++++++++++++++++++++++++ test/syntaxes/attributes.test.syntax.ts | 23 +++++++++++++++- test/syntaxes/utils/tokenizer.ts | 11 ++++++++ 3 files changed, 69 insertions(+), 1 deletion(-) diff --git a/syntaxes/csharp2.json b/syntaxes/csharp2.json index 688d421b6b..752dc37a22 100644 --- a/syntaxes/csharp2.json +++ b/syntaxes/csharp2.json @@ -143,8 +143,14 @@ { "include": "#boolean-literal" }, + { + "include": "#null-literal" + }, { "include": "#numeric-literal" + }, + { + "include": "#string-literal" } ] }, @@ -159,6 +165,9 @@ "match": "(? 
{ tokens.should.contain(Tokens.Puncuation.SquareBracket.Close(2, 11)); }); - it("Global attribute with two argument", () => { + it("Global attribute with two arguments", () => { const input = ` [Foo(true, 42)]`; @@ -99,5 +99,26 @@ describe("Grammar", () => { tokens.should.contain(Tokens.Puncuation.Parenthesis.Close(2, 14)); tokens.should.contain(Tokens.Puncuation.SquareBracket.Close(2, 15)); }); + + it("Global attribute with three arguments", () => { + + const input = ` +[Foo(true, 42, "text")]`; + + let tokens: Token[] = TokenizerUtil.tokenize2(input); + + tokens.should.contain(Tokens.Puncuation.SquareBracket.Open(2, 1)); + tokens.should.contain(Tokens.Type("Foo", 2, 2)); + tokens.should.contain(Tokens.Puncuation.Parenthesis.Open(2, 5)); + tokens.should.contain(Tokens.Literals.Boolean.True(2, 6)); + tokens.should.contain(Tokens.Puncuation.Comma(2, 10)); + tokens.should.contain(Tokens.Literals.Numeric.Decimal("42", 2, 12)); + tokens.should.contain(Tokens.Puncuation.Comma(2, 14)); + tokens.should.contain(Tokens.Puncuation.String.Begin(2, 16)); + tokens.should.contain(Tokens.Literals.String("text", 2, 17)); + tokens.should.contain(Tokens.Puncuation.String.End(2, 21)); + tokens.should.contain(Tokens.Puncuation.Parenthesis.Close(2, 22)); + tokens.should.contain(Tokens.Puncuation.SquareBracket.Close(2, 23)); + }); }); }); \ No newline at end of file diff --git a/test/syntaxes/utils/tokenizer.ts b/test/syntaxes/utils/tokenizer.ts index 541992708b..ddfc26cfca 100644 --- a/test/syntaxes/utils/tokenizer.ts +++ b/test/syntaxes/utils/tokenizer.ts @@ -114,6 +114,9 @@ export namespace Tokens { export const Hexadecimal = (text: string, line?: number, column?: number) => createToken(text, "constant.numeric.hex.cs", line, column); } + + export const String = (text: string, line?: number, column?: number) => + createToken(text, "string.quoted.double.cs", line, column); } export namespace Operators { @@ -150,6 +153,14 @@ export namespace Tokens { createToken("[", 
"punctuation.squarebracket.open.cs", line, column); } + export namespace String { + export const Begin = (line?: number, column?: number) => + createToken('"', "punctuation.definition.string.begin.cs", line, column); + + export const End = (line?: number, column?: number) => + createToken('"', "punctuation.definition.string.end.cs", line, column); + } + export const TypeParametersBegin = (line?: number, column?: number) => createToken("<", "punctuation.definition.typeparameters.begin.cs", line, column); From 1887e719f5c20bdafaba8015d60e2500b7894481 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Mon, 26 Dec 2016 13:37:23 -0800 Subject: [PATCH 011/192] Add attribute named arguments --- syntaxes/csharp2.json | 23 ++++++++- test/syntaxes/attributes.test.syntax.ts | 63 +++++++++++++++++++++++++ test/syntaxes/utils/tokenizer.ts | 8 ++++ 3 files changed, 93 insertions(+), 1 deletion(-) diff --git a/syntaxes/csharp2.json b/syntaxes/csharp2.json index 752dc37a22..2708268dab 100644 --- a/syntaxes/csharp2.json +++ b/syntaxes/csharp2.json @@ -123,6 +123,9 @@ } }, "patterns": [ + { + "include": "#attribute-named-argument" + }, { "include": "#expression" }, @@ -131,6 +134,23 @@ } ] }, + "attribute-named-argument": { + "begin": "([_$[:alpha:]][_$[:alnum:]]*)\\s*(?==)", + "beginCaptures": { + "1": { + "name": "entity.name.function.cs" + } + }, + "end": "(?=(,|\\)))", + "patterns": [ + { + "include": "#operator-assignment" + }, + { + "include": "#expression" + } + ] + }, "expression": { "patterns": [ { @@ -167,7 +187,8 @@ ] }, "null-literal": { - + "name": "constant.language.null.cs", + "match": "(? 
{ tokens.should.contain(Tokens.Puncuation.Parenthesis.Close(2, 22)); tokens.should.contain(Tokens.Puncuation.SquareBracket.Close(2, 23)); }); + + it("Global attribute with named argument", () => { + + const input = ` +[Foo(Bar = 42)]`; + + let tokens: Token[] = TokenizerUtil.tokenize2(input); + + tokens.should.contain(Tokens.Puncuation.SquareBracket.Open(2, 1)); + tokens.should.contain(Tokens.Type("Foo", 2, 2)); + tokens.should.contain(Tokens.Puncuation.Parenthesis.Open(2, 5)); + tokens.should.contain(Tokens.Identifiers.PropertyName("Bar", 2, 6)); + tokens.should.contain(Tokens.Operators.Assignment(2, 10)); + tokens.should.contain(Tokens.Literals.Numeric.Decimal("42", 2, 12)); + tokens.should.contain(Tokens.Puncuation.Parenthesis.Close(2, 14)); + tokens.should.contain(Tokens.Puncuation.SquareBracket.Close(2, 15)); + }); + + it("Global attribute with one positional argument and one named argument", () => { + + const input = ` +[Foo(true, Bar = 42)]`; + + let tokens: Token[] = TokenizerUtil.tokenize2(input); + + tokens.should.contain(Tokens.Puncuation.SquareBracket.Open(2, 1)); + tokens.should.contain(Tokens.Type("Foo", 2, 2)); + tokens.should.contain(Tokens.Puncuation.Parenthesis.Open(2, 5)); + tokens.should.contain(Tokens.Literals.Boolean.True(2, 6)); + tokens.should.contain(Tokens.Puncuation.Comma(2, 10)); + tokens.should.contain(Tokens.Identifiers.PropertyName("Bar", 2, 12)); + tokens.should.contain(Tokens.Operators.Assignment(2, 16)); + tokens.should.contain(Tokens.Literals.Numeric.Decimal("42", 2, 18)); + tokens.should.contain(Tokens.Puncuation.Parenthesis.Close(2, 20)); + tokens.should.contain(Tokens.Puncuation.SquareBracket.Close(2, 21)); + }); + + it("Global attribute with specifier, one positional argument, and two named arguments", () => { + + const input = ` +[module: Foo(true, Bar = 42, Baz = "hello")]`; + + let tokens: Token[] = TokenizerUtil.tokenize2(input); + + tokens.should.contain(Tokens.Puncuation.SquareBracket.Open(2, 1)); + 
tokens.should.contain(Tokens.Keywords.AttributeSpecifier("module", 2, 2)); + tokens.should.contain(Tokens.Puncuation.Colon(2, 8)); + tokens.should.contain(Tokens.Type("Foo", 2, 10)); + tokens.should.contain(Tokens.Puncuation.Parenthesis.Open(2, 13)); + tokens.should.contain(Tokens.Literals.Boolean.True(2, 14)); + tokens.should.contain(Tokens.Puncuation.Comma(2, 18)); + tokens.should.contain(Tokens.Identifiers.PropertyName("Bar", 2, 20)); + tokens.should.contain(Tokens.Operators.Assignment(2, 24)); + tokens.should.contain(Tokens.Literals.Numeric.Decimal("42", 2, 26)); + tokens.should.contain(Tokens.Puncuation.Comma(2, 28)); + tokens.should.contain(Tokens.Identifiers.PropertyName("Baz", 2, 30)); + tokens.should.contain(Tokens.Operators.Assignment(2, 34)); + tokens.should.contain(Tokens.Puncuation.String.Begin(2, 36)); + tokens.should.contain(Tokens.Literals.String("hello", 2, 37)); + tokens.should.contain(Tokens.Puncuation.String.End(2, 42)); + tokens.should.contain(Tokens.Puncuation.Parenthesis.Close(2, 43)); + tokens.should.contain(Tokens.Puncuation.SquareBracket.Close(2, 44)); + }); }); }); \ No newline at end of file diff --git a/test/syntaxes/utils/tokenizer.ts b/test/syntaxes/utils/tokenizer.ts index ddfc26cfca..d16ea4b754 100644 --- a/test/syntaxes/utils/tokenizer.ts +++ b/test/syntaxes/utils/tokenizer.ts @@ -95,6 +95,11 @@ export namespace Tokens { createToken("using", "keyword.other.using.cs", line, column); } + export namespace Identifiers { + export const PropertyName = (text: string, line?: number, column?: number) => + createToken(text, "entity.name.function.cs", line, column); + } + export namespace Literals { export namespace Boolean { export const False = (line?: number, column?: number) => @@ -104,6 +109,9 @@ export namespace Tokens { createToken("true", "constant.language.boolean.true.cs", line, column); } + export const Null = (line?: number, column?: number) => + createToken("null", "constant.language.null.cs", line, column); + export namespace 
Numeric { export const Binary = (text: string, line?: number, column?: number) => createToken(text, "constant.numeric.binary.cs", line, column); From c5831f6da9efcd4c064038af593d82c136c6e97d Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Mon, 26 Dec 2016 14:23:31 -0800 Subject: [PATCH 012/192] Add namespace declarations --- syntaxes/csharp2.json | 56 +++++++++++ test/syntaxes/namespace.test.syntax.ts | 78 --------------- test/syntaxes/namespaces.test.syntax.ts | 120 ++++++++++++++++++++++++ test/syntaxes/utils/tokenizer.ts | 12 +-- 4 files changed, 182 insertions(+), 84 deletions(-) delete mode 100644 test/syntaxes/namespace.test.syntax.ts create mode 100644 test/syntaxes/namespaces.test.syntax.ts diff --git a/syntaxes/csharp2.json b/syntaxes/csharp2.json index 2708268dab..902d5b92dd 100644 --- a/syntaxes/csharp2.json +++ b/syntaxes/csharp2.json @@ -10,6 +10,9 @@ }, { "include": "#directives" + }, + { + "include": "#declarations" } ], "repository": { @@ -151,6 +154,59 @@ } ] }, + "declarations": { + "patterns": [ + { + "include": "#namespace-declaration" + } + ] + }, + "namespace-declaration": { + "begin": "\\b(namespace)\\s+", + "beginCaptures": { + "1": { + "name": "keyword.other.namespace.cs" + } + }, + "end": "(?<=\\})", + "patterns": [ + { + "include": "#comment" + }, + { + "name": "entity.name.type.namespace.cs", + "match": "[_$[:alpha:]][_$[:alnum:]]*" + }, + { + "include": "#punctuation-accessor" + }, + { + "begin": "\\{", + "beginCaptures": { + "0": { + "name": "punctuation.definition.block.cs" + } + }, + "end": "\\}", + "endCaptures": { + "0": { + "name": "punctuation.definition.block.cs" + } + }, + "patterns": [ + { + "include": "#declarations" + }, + { + "include": "#using-directive" + }, + { + "include": "#punctuation-semicolon" + } + ] + } + ] + }, "expression": { "patterns": [ { diff --git a/test/syntaxes/namespace.test.syntax.ts b/test/syntaxes/namespace.test.syntax.ts deleted file mode 100644 index c071f4381f..0000000000 --- 
a/test/syntaxes/namespace.test.syntax.ts +++ /dev/null @@ -1,78 +0,0 @@ -import { should } from 'chai'; -import { Tokens, Token } from './utils/tokenizer'; -import { TokenizerUtil } from'./utils/tokenizerUtil'; - -describe("Grammar", function() { - before(function () { - should(); - }); - - describe("Namespace", function() { - it("has a namespace keyword and a name", function() { - -const input = ` -namespace TestNamespace -{ -}`; - let tokens: Token[] = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.NamespaceKeyword("namespace", 2, 1)); - tokens.should.contain(Tokens.NamespaceIdentifier("TestNamespace", 2, 11)); - }); - - it("can be nested", function() { - -const input = ` -namespace TestNamespace -{ - namespace NestedNamespace { - - } -}`; - let tokens: Token[] = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.NamespaceKeyword("namespace", 2, 1)); - tokens.should.contain(Tokens.NamespaceIdentifier("TestNamespace", 2, 11)); - - tokens.should.contain(Tokens.NamespaceKeyword("namespace", 4, 5)); - tokens.should.contain(Tokens.NamespaceIdentifier("NestedNamespace", 4, 15)); - }); - - it("can contain using statements", function() { - -const input = ` -using UsineOne; -using one = UsineOne.Something; - -namespace TestNamespace -{ - using UsingTwo; - using two = UsineOne.Something; - - namespace NestedNamespace - { - using UsingThree; - using three = UsineOne.Something; - } -}`; - let tokens: Token[] = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.UsingKeyword("using", 2, 1)); - tokens.should.contain(Tokens.UsingKeyword("using", 3, 1)); - - tokens.should.contain(Tokens.NamespaceKeyword("namespace", 5, 1)); - tokens.should.contain(Tokens.NamespaceIdentifier("TestNamespace", 5, 11)); - - tokens.should.contain(Tokens.UsingKeyword("using", 7, 5)); - tokens.should.contain(Tokens.UsingKeyword("using", 8, 5)); - - tokens.should.contain(Tokens.NamespaceKeyword("namespace", 10, 5)); - 
tokens.should.contain(Tokens.NamespaceIdentifier("NestedNamespace", 10, 15)); - - tokens.should.contain(Tokens.UsingKeyword("using", 12, 9)); - tokens.should.contain(Tokens.UsingKeyword("using", 12, 9)); - }); - }); -}); - - diff --git a/test/syntaxes/namespaces.test.syntax.ts b/test/syntaxes/namespaces.test.syntax.ts new file mode 100644 index 0000000000..7290105de6 --- /dev/null +++ b/test/syntaxes/namespaces.test.syntax.ts @@ -0,0 +1,120 @@ +import { should } from 'chai'; +import { Tokens, Token } from './utils/tokenizer'; +import { TokenizerUtil } from './utils/tokenizerUtil'; + +describe("Grammar", () => { + before(() => { + should(); + }); + + describe("Namespace", () => { + it("has a namespace keyword and a name", () => { + + const input = ` +namespace TestNamespace +{ +}`; + let tokens: Token[] = TokenizerUtil.tokenize2(input); + + tokens.should.contain(Tokens.Keywords.Namespace(2, 1)); + tokens.should.contain(Tokens.Identifiers.NamespaceName("TestNamespace", 2, 11)); + }); + + it("has a namespace keyword and a dotted name", () => { + + const input = ` +namespace Test.Namespace +{ +}`; + let tokens: Token[] = TokenizerUtil.tokenize2(input); + + tokens.should.contain(Tokens.Keywords.Namespace(2, 1)); + tokens.should.contain(Tokens.Identifiers.NamespaceName("Test", 2, 11)); + tokens.should.contain(Tokens.Puncuation.Accessor(2, 15)); + tokens.should.contain(Tokens.Identifiers.NamespaceName("Namespace", 2, 16)); + }); + + it("can be nested", () => { + + const input = ` +namespace TestNamespace +{ + namespace NestedNamespace { + + } +}`; + let tokens: Token[] = TokenizerUtil.tokenize2(input); + + tokens.should.contain(Tokens.Keywords.Namespace(2, 1)); + tokens.should.contain(Tokens.Identifiers.NamespaceName("TestNamespace", 2, 11)); + + tokens.should.contain(Tokens.Keywords.Namespace(4, 5)); + tokens.should.contain(Tokens.Identifiers.NamespaceName("NestedNamespace", 4, 15)); + }); + + it("can contain using statements", () => { + + const input = ` +using 
UsingOne; +using one = UsingOne.Something; + +namespace TestNamespace +{ + using UsingTwo; + using two = UsingTwo.Something; + + namespace NestedNamespace + { + using UsingThree; + using three = UsingThree.Something; + } +}`; + let tokens: Token[] = TokenizerUtil.tokenize2(input); + + tokens.should.contain(Tokens.UsingKeyword("using", 2, 1)); + tokens.should.contain(Tokens.Type("UsingOne", 2, 7)); + tokens.should.contain(Tokens.Puncuation.Semicolon(2, 15)); + + tokens.should.contain(Tokens.UsingKeyword("using", 3, 1)); + tokens.should.contain(Tokens.Type("one", 3, 7)); + tokens.should.contain(Tokens.Operators.Assignment(3, 11)); + tokens.should.contain(Tokens.Type("UsingOne", 3, 13)); + tokens.should.contain(Tokens.Puncuation.Accessor(3, 21)); + tokens.should.contain(Tokens.Type("Something", 3, 22)); + tokens.should.contain(Tokens.Puncuation.Semicolon(3, 31)); + + tokens.should.contain(Tokens.Keywords.Namespace(5, 1)); + tokens.should.contain(Tokens.Identifiers.NamespaceName("TestNamespace", 5, 11)); + + tokens.should.contain(Tokens.UsingKeyword("using", 7, 5)); + tokens.should.contain(Tokens.Type("UsingTwo", 7, 11)); + tokens.should.contain(Tokens.Puncuation.Semicolon(7, 19)); + + tokens.should.contain(Tokens.UsingKeyword("using", 8, 5)); + tokens.should.contain(Tokens.Type("two", 8, 11)); + tokens.should.contain(Tokens.Operators.Assignment(8, 15)); + tokens.should.contain(Tokens.Type("UsingTwo", 8, 17)); + tokens.should.contain(Tokens.Puncuation.Accessor(8, 25)); + tokens.should.contain(Tokens.Type("Something", 8, 26)); + tokens.should.contain(Tokens.Puncuation.Semicolon(8, 35)); + + tokens.should.contain(Tokens.Keywords.Namespace(10, 5)); + tokens.should.contain(Tokens.Identifiers.NamespaceName("NestedNamespace", 10, 15)); + + tokens.should.contain(Tokens.UsingKeyword("using", 12, 9)); + tokens.should.contain(Tokens.Type("UsingThree", 12, 15)); + tokens.should.contain(Tokens.Puncuation.Semicolon(12, 25)); + + tokens.should.contain(Tokens.UsingKeyword("using", 
13, 9)); + tokens.should.contain(Tokens.Type("three", 13, 15)); + tokens.should.contain(Tokens.Operators.Assignment(13, 21)); + tokens.should.contain(Tokens.Type("UsingThree", 13, 23)); + tokens.should.contain(Tokens.Puncuation.Accessor(13, 33)); + tokens.should.contain(Tokens.Type("Something", 13, 34)); + tokens.should.contain(Tokens.Puncuation.Semicolon(13, 43)); + + }); + }); +}); + + diff --git a/test/syntaxes/utils/tokenizer.ts b/test/syntaxes/utils/tokenizer.ts index d16ea4b754..80f872a231 100644 --- a/test/syntaxes/utils/tokenizer.ts +++ b/test/syntaxes/utils/tokenizer.ts @@ -88,6 +88,9 @@ export namespace Tokens { export const Extern = (line?: number, column?: number) => createToken("extern", "keyword.other.extern.cs", line, column); + export const Namespace = (line?: number, column?: number) => + createToken("namespace", "keyword.other.namespace.cs", line, column); + export const Static = (line?: number, column?: number) => createToken("static", "keyword.other.static.cs", line, column); @@ -96,6 +99,9 @@ export namespace Tokens { } export namespace Identifiers { + export const NamespaceName = (text: string, line?: number, column?: number) => + createToken(text, "entity.name.type.namespace.cs", line, column); + export const PropertyName = (text: string, line?: number, column?: number) => createToken(text, "entity.name.function.cs", line, column); } @@ -181,12 +187,6 @@ export namespace Tokens { createToken(text, "variable.other.alias.cs", line, column); } - export const NamespaceKeyword = (text: string, line?: number, column?: number) => - createToken(text, "keyword.other.namespace.cs", line, column); - - export const NamespaceIdentifier = (text: string, line?: number, column?: number) => - createToken(text, "entity.name.type.namespace.cs", line, column); - export const UsingKeyword = (text: string, line?: number, column?: number) => createToken(text, "keyword.other.using.cs", line, column); From 8a242d4ffb7a25788ea8954529d073339b4b1005 Mon Sep 17 00:00:00 
2001 From: Dustin Campbell Date: Mon, 26 Dec 2016 14:54:46 -0800 Subject: [PATCH 013/192] Refine using directives a bit --- syntaxes/csharp2.json | 47 ++++++++++- test/syntaxes/namespaces.test.syntax.ts | 12 +-- test/syntaxes/using-directives.test.syntax.ts | 84 +++++++++---------- test/syntaxes/utils/tokenizer.ts | 13 ++- 4 files changed, 102 insertions(+), 54 deletions(-) diff --git a/syntaxes/csharp2.json b/syntaxes/csharp2.json index 902d5b92dd..7374251e2d 100644 --- a/syntaxes/csharp2.json +++ b/syntaxes/csharp2.json @@ -50,7 +50,7 @@ "using-directive": { "patterns": [ { - "begin": "\\s*(using)\\b\\s*(static)*", + "begin": "\\b(using)\\b\\s+(static)\\s+", "beginCaptures": { "1": { "name": "keyword.other.using.cs" @@ -63,6 +63,47 @@ "patterns": [ { "include": "#type" + } + ] + }, + { + "begin": "\\b(using)\\s+(?=([_$[:alpha:]][_$[:alnum:]]*)\\s*=)", + "beginCaptures": { + "1": { + "name": "keyword.other.using.cs" + }, + "2": { + "name": "entity.name.type.alias.cs" + } + }, + "end": "(?=;)", + "patterns": [ + { + "include": "#comment" + }, + { + "include": "#type" + }, + { + "include": "#operator-assignment" + } + ] + }, + { + "begin": "\\b(using)\\s*", + "beginCaptures": { + "1": { + "name": "keyword.other.using.cs" + } + }, + "end": "(?=;)", + "patterns": [ + { + "include": "#comment" + }, + { + "name": "entity.name.type.namespace.cs", + "match": "[_$[:alpha:]][_$[:alnum:]]*" }, { "include": "#operator-assignment" @@ -184,13 +225,13 @@ "begin": "\\{", "beginCaptures": { "0": { - "name": "punctuation.definition.block.cs" + "name": "punctuation.curlybrace.open.cs" } }, "end": "\\}", "endCaptures": { "0": { - "name": "punctuation.definition.block.cs" + "name": "punctuation.curlybrace.close.cs" } }, "patterns": [ diff --git a/test/syntaxes/namespaces.test.syntax.ts b/test/syntaxes/namespaces.test.syntax.ts index 7290105de6..d606c7e231 100644 --- a/test/syntaxes/namespaces.test.syntax.ts +++ b/test/syntaxes/namespaces.test.syntax.ts @@ -72,11 +72,11 @@ namespace 
TestNamespace let tokens: Token[] = TokenizerUtil.tokenize2(input); tokens.should.contain(Tokens.UsingKeyword("using", 2, 1)); - tokens.should.contain(Tokens.Type("UsingOne", 2, 7)); + tokens.should.contain(Tokens.Identifiers.NamespaceName("UsingOne", 2, 7)); tokens.should.contain(Tokens.Puncuation.Semicolon(2, 15)); tokens.should.contain(Tokens.UsingKeyword("using", 3, 1)); - tokens.should.contain(Tokens.Type("one", 3, 7)); + tokens.should.contain(Tokens.Identifiers.AliasName("one", 3, 7)); tokens.should.contain(Tokens.Operators.Assignment(3, 11)); tokens.should.contain(Tokens.Type("UsingOne", 3, 13)); tokens.should.contain(Tokens.Puncuation.Accessor(3, 21)); @@ -87,11 +87,11 @@ namespace TestNamespace tokens.should.contain(Tokens.Identifiers.NamespaceName("TestNamespace", 5, 11)); tokens.should.contain(Tokens.UsingKeyword("using", 7, 5)); - tokens.should.contain(Tokens.Type("UsingTwo", 7, 11)); + tokens.should.contain(Tokens.Identifiers.NamespaceName("UsingTwo", 7, 11)); tokens.should.contain(Tokens.Puncuation.Semicolon(7, 19)); tokens.should.contain(Tokens.UsingKeyword("using", 8, 5)); - tokens.should.contain(Tokens.Type("two", 8, 11)); + tokens.should.contain(Tokens.Identifiers.AliasName("two", 8, 11)); tokens.should.contain(Tokens.Operators.Assignment(8, 15)); tokens.should.contain(Tokens.Type("UsingTwo", 8, 17)); tokens.should.contain(Tokens.Puncuation.Accessor(8, 25)); @@ -102,11 +102,11 @@ namespace TestNamespace tokens.should.contain(Tokens.Identifiers.NamespaceName("NestedNamespace", 10, 15)); tokens.should.contain(Tokens.UsingKeyword("using", 12, 9)); - tokens.should.contain(Tokens.Type("UsingThree", 12, 15)); + tokens.should.contain(Tokens.Identifiers.NamespaceName("UsingThree", 12, 15)); tokens.should.contain(Tokens.Puncuation.Semicolon(12, 25)); tokens.should.contain(Tokens.UsingKeyword("using", 13, 9)); - tokens.should.contain(Tokens.Type("three", 13, 15)); + tokens.should.contain(Tokens.Identifiers.AliasName("three", 13, 15)); 
tokens.should.contain(Tokens.Operators.Assignment(13, 21)); tokens.should.contain(Tokens.Type("UsingThree", 13, 23)); tokens.should.contain(Tokens.Puncuation.Accessor(13, 33)); diff --git a/test/syntaxes/using-directives.test.syntax.ts b/test/syntaxes/using-directives.test.syntax.ts index 4e69fdb788..cb2a5b8e80 100644 --- a/test/syntaxes/using-directives.test.syntax.ts +++ b/test/syntaxes/using-directives.test.syntax.ts @@ -14,7 +14,7 @@ using System;`; let tokens: Token[] = TokenizerUtil.tokenize2(input); tokens.should.contain(Tokens.Keywords.Using(2, 1)); - tokens.should.contain(Tokens.Type("System", 2, 7)); + tokens.should.contain(Tokens.Identifiers.NamespaceName("System", 2, 7)); tokens.should.contain(Tokens.Puncuation.Semicolon(2, 13)); }); @@ -41,7 +41,7 @@ using S = System;`; let tokens: Token[] = TokenizerUtil.tokenize2(input); tokens.should.contain(Tokens.Keywords.Using(2, 1)); - tokens.should.contain(Tokens.Type("S", 2, 7)); + tokens.should.contain(Tokens.Identifiers.AliasName("S", 2, 7)); tokens.should.contain(Tokens.Operators.Assignment(2, 9)); tokens.should.contain(Tokens.Type("System", 2, 11)); tokens.should.contain(Tokens.Puncuation.Semicolon(2, 17)); @@ -55,7 +55,7 @@ using C = System.Console;`; let tokens: Token[] = TokenizerUtil.tokenize2(input); tokens.should.contain(Tokens.Keywords.Using(2, 1)); - tokens.should.contain(Tokens.Type("C", 2, 7)); + tokens.should.contain(Tokens.Identifiers.AliasName("C", 2, 7)); tokens.should.contain(Tokens.Operators.Assignment(2, 9)); tokens.should.contain(Tokens.Type("System", 2, 11)); tokens.should.contain(Tokens.Puncuation.Accessor(2, 17)); @@ -71,7 +71,7 @@ using IntList = System.Collections.Generic.List;`; let tokens: Token[] = TokenizerUtil.tokenize2(input); tokens.should.contain(Tokens.Keywords.Using(2, 1)); - tokens.should.contain(Tokens.Type("IntList", 2, 7)); + tokens.should.contain(Tokens.Identifiers.AliasName("IntList", 2, 7)); tokens.should.contain(Tokens.Operators.Assignment(2, 15)); 
tokens.should.contain(Tokens.Type("System", 2, 17)); tokens.should.contain(Tokens.Puncuation.Accessor(2, 23)); @@ -96,7 +96,7 @@ using X = System.Collections.Generic.Dictionary { const input = ` -using/**/X/**/=/**/Dictionary/**//**/>/**/;//end`; +using X =/**/Dictionary/**//**/>/**/;//end`; let tokens: Token[] = TokenizerUtil.tokenize2(input); tokens.should.contain(Tokens.Keywords.Using(2, 1)); - tokens.should.contain(Tokens.Comment.MultiLine.Start(2, 6)); - tokens.should.contain(Tokens.Comment.MultiLine.End(2, 8)); - tokens.should.contain(Tokens.Type("X", 2, 10)); - tokens.should.contain(Tokens.Comment.MultiLine.Start(2, 11)); - tokens.should.contain(Tokens.Comment.MultiLine.End(2, 13)); - tokens.should.contain(Tokens.Operators.Assignment(2, 15)); - tokens.should.contain(Tokens.Comment.MultiLine.Start(2, 16)); - tokens.should.contain(Tokens.Comment.MultiLine.End(2, 18)); - tokens.should.contain(Tokens.Type("Dictionary", 2, 20)); - tokens.should.contain(Tokens.Comment.MultiLine.Start(2, 30)); - tokens.should.contain(Tokens.Comment.MultiLine.End(2, 32)); - tokens.should.contain(Tokens.Puncuation.TypeParametersBegin(2, 34)); - tokens.should.contain(Tokens.Comment.MultiLine.Start(2, 35)); - tokens.should.contain(Tokens.Comment.MultiLine.End(2, 37)); - tokens.should.contain(Tokens.Type("int", 2, 39)); - tokens.should.contain(Tokens.Comment.MultiLine.Start(2, 42)); - tokens.should.contain(Tokens.Comment.MultiLine.End(2, 44)); - tokens.should.contain(Tokens.Puncuation.Comma(2, 46)); - tokens.should.contain(Tokens.Comment.MultiLine.Start(2, 47)); - tokens.should.contain(Tokens.Comment.MultiLine.End(2, 49)); - tokens.should.contain(Tokens.Type("List", 2, 51)); - tokens.should.contain(Tokens.Comment.MultiLine.Start(2, 55)); - tokens.should.contain(Tokens.Comment.MultiLine.End(2, 57)); - tokens.should.contain(Tokens.Puncuation.TypeParametersBegin(2, 59)); - tokens.should.contain(Tokens.Comment.MultiLine.Start(2, 60)); - tokens.should.contain(Tokens.Comment.MultiLine.End(2, 
62)); - tokens.should.contain(Tokens.Type("string", 2, 64)); - tokens.should.contain(Tokens.Comment.MultiLine.Start(2, 70)); - tokens.should.contain(Tokens.Comment.MultiLine.End(2, 72)); - tokens.should.contain(Tokens.Puncuation.TypeParametersEnd(2, 74)); - tokens.should.contain(Tokens.Comment.MultiLine.Start(2, 75)); - tokens.should.contain(Tokens.Comment.MultiLine.End(2, 77)); - tokens.should.contain(Tokens.Puncuation.TypeParametersEnd(2, 79)); - tokens.should.contain(Tokens.Comment.MultiLine.Start(2, 80)); - tokens.should.contain(Tokens.Comment.MultiLine.End(2, 82)); - tokens.should.contain(Tokens.Puncuation.Semicolon(2, 84)); - tokens.should.contain(Tokens.Comment.SingleLine.Start(2, 85)); - tokens.should.contain(Tokens.Comment.SingleLine.Text("end", 2, 87)); + tokens.should.contain(Tokens.Identifiers.AliasName("X", 2, 7)); + tokens.should.contain(Tokens.Operators.Assignment(2, 9)); + tokens.should.contain(Tokens.Comment.MultiLine.Start(2, 10)); + tokens.should.contain(Tokens.Comment.MultiLine.End(2, 12)); + tokens.should.contain(Tokens.Type("Dictionary", 2, 14)); + tokens.should.contain(Tokens.Comment.MultiLine.Start(2, 24)); + tokens.should.contain(Tokens.Comment.MultiLine.End(2, 26)); + tokens.should.contain(Tokens.Puncuation.TypeParametersBegin(2, 28)); + tokens.should.contain(Tokens.Comment.MultiLine.Start(2, 29)); + tokens.should.contain(Tokens.Comment.MultiLine.End(2, 31)); + tokens.should.contain(Tokens.Type("int", 2, 33)); + tokens.should.contain(Tokens.Comment.MultiLine.Start(2, 36)); + tokens.should.contain(Tokens.Comment.MultiLine.End(2, 38)); + tokens.should.contain(Tokens.Puncuation.Comma(2, 40)); + tokens.should.contain(Tokens.Comment.MultiLine.Start(2, 41)); + tokens.should.contain(Tokens.Comment.MultiLine.End(2, 43)); + tokens.should.contain(Tokens.Type("List", 2, 45)); + tokens.should.contain(Tokens.Comment.MultiLine.Start(2, 49)); + tokens.should.contain(Tokens.Comment.MultiLine.End(2, 51)); + 
tokens.should.contain(Tokens.Puncuation.TypeParametersBegin(2, 53)); + tokens.should.contain(Tokens.Comment.MultiLine.Start(2, 54)); + tokens.should.contain(Tokens.Comment.MultiLine.End(2, 56)); + tokens.should.contain(Tokens.Type("string", 2, 58)); + tokens.should.contain(Tokens.Comment.MultiLine.Start(2, 64)); + tokens.should.contain(Tokens.Comment.MultiLine.End(2, 66)); + tokens.should.contain(Tokens.Puncuation.TypeParametersEnd(2, 68)); + tokens.should.contain(Tokens.Comment.MultiLine.Start(2, 69)); + tokens.should.contain(Tokens.Comment.MultiLine.End(2, 71)); + tokens.should.contain(Tokens.Puncuation.TypeParametersEnd(2, 73)); + tokens.should.contain(Tokens.Comment.MultiLine.Start(2, 74)); + tokens.should.contain(Tokens.Comment.MultiLine.End(2, 76)); + tokens.should.contain(Tokens.Puncuation.Semicolon(2, 78)); + tokens.should.contain(Tokens.Comment.SingleLine.Start(2, 79)); + tokens.should.contain(Tokens.Comment.SingleLine.Text("end", 2, 81)); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/utils/tokenizer.ts b/test/syntaxes/utils/tokenizer.ts index 80f872a231..7689aeda81 100644 --- a/test/syntaxes/utils/tokenizer.ts +++ b/test/syntaxes/utils/tokenizer.ts @@ -99,9 +99,12 @@ export namespace Tokens { } export namespace Identifiers { + export const AliasName = (text: string, line?: number, column?: number) => + createToken(text, "entity.name.type.alias.cs", line, column); + export const NamespaceName = (text: string, line?: number, column?: number) => createToken(text, "entity.name.type.namespace.cs", line, column); - + export const PropertyName = (text: string, line?: number, column?: number) => createToken(text, "entity.name.function.cs", line, column); } @@ -148,6 +151,14 @@ export namespace Tokens { export const Comma = (line?: number, column?: number) => createToken(",", "punctuation.separator.comma.cs", line, column); + export namespace CurlyBrace { + export const Close = (line?: number, column?: number) => + createToken("}", 
"punctuation.curlybrace.close.cs", line, column); + + export const Open = (line?: number, column?: number) => + createToken("{", "punctuation.curlybrace.open.cs", line, column); + } + export namespace Parenthesis { export const Close = (line?: number, column?: number) => createToken(")", "punctuation.parenthesis.close.cs", line, column); From 839b76d2d9bddaaf05e62c6c589af73876614cff Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Mon, 26 Dec 2016 17:20:29 -0800 Subject: [PATCH 014/192] Add class declarations --- syntaxes/csharp2.json | 131 ++++++++++++++ test/syntaxes/class.test.syntax.ts | 175 ------------------ test/syntaxes/classes.test.syntax.ts | 226 ++++++++++++++++++++++++ test/syntaxes/namespaces.test.syntax.ts | 12 +- test/syntaxes/utils/tokenizer.ts | 75 +++++--- 5 files changed, 418 insertions(+), 201 deletions(-) delete mode 100644 test/syntaxes/class.test.syntax.ts create mode 100644 test/syntaxes/classes.test.syntax.ts diff --git a/syntaxes/csharp2.json b/syntaxes/csharp2.json index 7374251e2d..276890403f 100644 --- a/syntaxes/csharp2.json +++ b/syntaxes/csharp2.json @@ -199,6 +199,9 @@ "patterns": [ { "include": "#namespace-declaration" + }, + { + "include": "#type-declaration" } ] }, @@ -248,6 +251,134 @@ } ] }, + "type-declaration": { + "begin": "(?=(?:((new|public|protected|internal|private|abstract|sealed|static|partial)\\s+)*)(?:class|struct|interface|enum)\\s+)", + "end": "(?<=\\})", + "patterns": [ + { + "include": "#comment" + }, + { + "name": "storage.modifier.cs", + "match": "\\b(new|public|protected|internal|private|abstract|sealed|static|partial)\\b" + }, + { + "begin": "(?=class)", + "end": "(?=\\{)", + "patterns": [ + { + "include": "#class-header" + } + ] + }, + { + "begin": "\\{", + "beginCaptures": { + "0": { + "name": "punctuation.curlybrace.open.cs" + } + }, + "end": "\\}", + "endCaptures": { + "0": { + "name": "punctuation.curlybrace.close.cs" + } + }, + "patterns": [ + { + "include": "#type-declaration" + }, + { + "include": 
"#punctuation-semicolon" + } + ] + } + ] + }, + "class-header": { + "patterns": [ + { + "comment": "C# grammar: class identifier type-parameter-list[opt]", + "match": "(class)\\s+([_$[:alpha:]][_$[:alnum:]]*(\\s*<\\s*(?:[_$[:alpha:]][_$[:alnum:]]*\\s*,\\s*)*(?:[_$[:alpha:]][_$[:alnum:]]*)\\s*>)?)", + "captures": { + "1": { + "name": "keyword.other.class.cs" + }, + "2": { + "name": "entity.name.type.class.cs" + } + } + }, + { + "include": "#generic-constraints" + }, + { + "begin": "(?=:)", + "beginCaptures": { + "0": { + "name": "punctuation.separator.colon.cs" + } + }, + "end": "(?=\\{|where)", + "patterns": [ + { + "include": "#type" + }, + { + "include": "#punctuation-comma" + } + ] + } + ] + }, + "generic-constraints": { + "begin": "(where)\\s+(\\w+)\\s*(:)", + "beginCaptures": { + "1": { + "name": "keyword.other.where.cs" + }, + "2": { + "name": "storage.type.cs" + }, + "3": { + "name": "punctuation.separator.colon.cs" + } + }, + "end": "(?=\\{|where)", + "patterns": [ + { + "name": "keyword.other.class.cs", + "match": "\\bclass\\b" + }, + { + "name": "keyword.other.struct.cs", + "match": "\\bstruct\\b" + }, + { + "match": "(new)\\s*(\\()\\s*(\\))", + "captures": { + "1": { + "name": "keyword.other.new.cs" + }, + "2": { + "name": "punctuation.parenthesis.open.cs" + }, + "3": { + "name": "punctuation.parenthesis.close.cs" + } + } + }, + { + "include": "#type" + }, + { + "include": "#punctuation-comma" + }, + { + "include": "#generic-constraints" + } + ] + }, "expression": { "patterns": [ { diff --git a/test/syntaxes/class.test.syntax.ts b/test/syntaxes/class.test.syntax.ts deleted file mode 100644 index ec967ce46a..0000000000 --- a/test/syntaxes/class.test.syntax.ts +++ /dev/null @@ -1,175 +0,0 @@ -import { should } from 'chai'; -import { Tokens, Token } from './utils/tokenizer'; -import { TokenizerUtil } from'./utils/tokenizerUtil'; - -describe("Grammar", function() { - before(function() { - should(); - }); - - describe("Class", function() { - it("class keyword 
and storage modifiers", function() { - -const input = ` -namespace TestNamespace -{ - public class PublicClass { } - - class DefaultClass { } - - internal class InternalClass { } - - static class DefaultStaticClass { } - - public static class PublicStaticClass { } - - sealed class DefaultSealedClass { } - - public sealed class PublicSealedClass { } - - public abstract class PublicAbstractClass { } - - abstract class DefaultAbstractClass { } -}`; - let tokens: Token[] = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.StorageModifierKeyword("public", 4, 5)); - tokens.should.contain(Tokens.ClassKeyword("class", 4, 24)); - tokens.should.contain(Tokens.ClassIdentifier("PublicClass", 4, 30)); - - tokens.should.contain(Tokens.ClassKeyword("class", 6, 24)); - tokens.should.contain(Tokens.ClassIdentifier("DefaultClass", 6, 30)); - - tokens.should.contain(Tokens.StorageModifierKeyword("internal", 8, 5)); - tokens.should.contain(Tokens.ClassKeyword("class", 8, 24)); - tokens.should.contain(Tokens.ClassIdentifier("InternalClass", 8, 30)); - - tokens.should.contain(Tokens.StorageModifierKeyword("static", 10, 15)); - tokens.should.contain(Tokens.ClassKeyword("class", 10, 24)); - tokens.should.contain(Tokens.ClassIdentifier("DefaultStaticClass", 10, 30)); - - tokens.should.contain(Tokens.StorageModifierKeyword("public", 12, 5)); - tokens.should.contain(Tokens.StorageModifierKeyword("static", 12, 15)); - tokens.should.contain(Tokens.ClassKeyword("class", 12, 24)); - tokens.should.contain(Tokens.ClassIdentifier("PublicStaticClass", 12, 30)); - - tokens.should.contain(Tokens.StorageModifierKeyword("sealed", 14, 15)); - tokens.should.contain(Tokens.ClassKeyword("class", 14, 24)); - tokens.should.contain(Tokens.ClassIdentifier("DefaultSealedClass", 14, 30)); - - tokens.should.contain(Tokens.StorageModifierKeyword("public", 16, 5)); - tokens.should.contain(Tokens.StorageModifierKeyword("sealed", 16, 15)); - tokens.should.contain(Tokens.ClassKeyword("class", 16, 24)); - 
tokens.should.contain(Tokens.ClassIdentifier("PublicSealedClass", 16, 30)); - - tokens.should.contain(Tokens.StorageModifierKeyword("public", 18, 5)); - tokens.should.contain(Tokens.StorageModifierKeyword("abstract", 18, 15)); - tokens.should.contain(Tokens.ClassKeyword("class", 18, 24)); - tokens.should.contain(Tokens.ClassIdentifier("PublicAbstractClass", 18, 30)); - - tokens.should.contain(Tokens.StorageModifierKeyword("abstract", 20, 15)); - tokens.should.contain(Tokens.ClassKeyword("class", 20, 24)); - tokens.should.contain(Tokens.ClassIdentifier("DefaultAbstractClass", 20, 30)); - - }); - - it("generics in identifier", function () { - - const input = ` -namespace TestNamespace -{ - class Dictionary> { } -}`; - let tokens: Token[] = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.ClassKeyword("class", 4, 5)); - - // OLD: tokens.should.contain(Tokens.ClassIdentifier("Dictionary>", 4, 11)); - tokens.should.contain(Tokens.ClassIdentifier("Dictionary", 4, 11)); - tokens.should.contain(Tokens.ClassIdentifier("T", 4, 22)); - tokens.should.contain(Tokens.ClassIdentifier("Dictionary", 4, 25)); - }); - - it("inheritance", function() { - -const input = ` -namespace TestNamespace -{ - class PublicClass : IInterface, IInterfaceTwo { } - class PublicClass : Root.IInterface, Something.IInterfaceTwo { } - class PublicClass : Dictionary>, IMap> { } -}`; - let tokens: Token[] = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.ClassKeyword("class", 4, 5)); - tokens.should.contain(Tokens.ClassIdentifier("PublicClass", 4, 11)); - tokens.should.contain(Tokens.Type("IInterface", 4, 28)); - tokens.should.contain(Tokens.Type("IInterfaceTwo", 4, 43)); - - tokens.should.contain(Tokens.ClassKeyword("class", 5, 5)); - tokens.should.contain(Tokens.ClassIdentifier("PublicClass", 5, 11)); - tokens.should.contain(Tokens.Type("Root.IInterface", 5, 28)); - tokens.should.contain(Tokens.Type("Something.IInterfaceTwo", 5, 63)); - 
tokens.should.contain(Tokens.Type("Dictionary", 6, 28)); - tokens.should.contain(Tokens.Type("Dictionary", 6, 42)); - tokens.should.contain(Tokens.Type("IMap", 6, 71)); - tokens.should.contain(Tokens.Type("Dictionary", 6, 79)); - }); - - it("generic constraints", function() { - -const input = ` -namespace TestNamespace -{ - class PublicClass where T : ISomething { } - class PublicClass : Dictionary[]>, ISomething where T : ICar, new() where X : struct { } -}`; - let tokens: Token[] = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.ClassKeyword("class", 4, 5)); - tokens.should.contain(Tokens.ClassIdentifier("PublicClass", 4, 11)); - tokens.should.contain(Tokens.Keyword("where", 4, 26)); - tokens.should.contain(Tokens.Type("T", 4, 32)); - tokens.should.contain(Tokens.Type("ISomething", 4, 36)); - - tokens.should.contain(Tokens.ClassKeyword("class", 5, 5)); - tokens.should.contain(Tokens.ClassIdentifier("PublicClass", 5, 11)); - tokens.should.contain(Tokens.Type("Dictionary", 5, 31)); - tokens.should.contain(Tokens.Type("List[]", 5, 45)); - tokens.should.contain(Tokens.Type("ISomething", 5, 62)); - tokens.should.contain(Tokens.Keyword("where", 5, 73)); - tokens.should.contain(Tokens.Type("T", 5, 79)); - tokens.should.contain(Tokens.Type("ICar", 5, 83)); - tokens.should.contain(Tokens.Keyword("new", 5, 89)); - tokens.should.contain(Tokens.Keyword("where", 5, 95)); - tokens.should.contain(Tokens.Type("X", 5, 101)); - tokens.should.contain(Tokens.Keyword("struct", 5, 105)); - - }); - - it("nested class", function() { - -const input = ` -namespace TestNamespace -{ - class Klass - { - public class Nested - { - - } - } -}`; - let tokens: Token[] = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.ClassKeyword("class", 4, 5)); - tokens.should.contain(Tokens.ClassIdentifier("Klass", 4, 11)); - - tokens.should.contain(Tokens.StorageModifierKeyword("public", 6, 9)); - tokens.should.contain(Tokens.ClassKeyword("class", 6, 16)); - 
tokens.should.contain(Tokens.ClassIdentifier("Nested", 6, 22)); - }); - }); -}); - - diff --git a/test/syntaxes/classes.test.syntax.ts b/test/syntaxes/classes.test.syntax.ts new file mode 100644 index 0000000000..26eacae551 --- /dev/null +++ b/test/syntaxes/classes.test.syntax.ts @@ -0,0 +1,226 @@ +import { should } from 'chai'; +import { Tokens, Token } from './utils/tokenizer'; +import { TokenizerUtil } from './utils/tokenizerUtil'; + +describe("Grammar", () => { + before(() => { + should(); + }); + + describe("Class", () => { + it("class keyword and storage modifiers", () => { + + const input = ` +namespace TestNamespace +{ + public class PublicClass { } + + class DefaultClass { } + + internal class InternalClass { } + + static class DefaultStaticClass { } + + public static class PublicStaticClass { } + + sealed class DefaultSealedClass { } + + public sealed class PublicSealedClass { } + + public abstract class PublicAbstractClass { } + + abstract class DefaultAbstractClass { } +}`; + let tokens: Token[] = TokenizerUtil.tokenize2(input); + + tokens.should.contain(Tokens.Keywords.Modifiers.Public(4, 5)); + tokens.should.contain(Tokens.Keywords.Class(4, 24)); + tokens.should.contain(Tokens.Identifiers.ClassName("PublicClass", 4, 30)); + + tokens.should.contain(Tokens.Keywords.Class(6, 24)); + tokens.should.contain(Tokens.Identifiers.ClassName("DefaultClass", 6, 30)); + + tokens.should.contain(Tokens.Keywords.Modifiers.Internal(8, 5)); + tokens.should.contain(Tokens.Keywords.Class(8, 24)); + tokens.should.contain(Tokens.Identifiers.ClassName("InternalClass", 8, 30)); + + tokens.should.contain(Tokens.Keywords.Modifiers.Static(10, 15)); + tokens.should.contain(Tokens.Keywords.Class(10, 24)); + tokens.should.contain(Tokens.Identifiers.ClassName("DefaultStaticClass", 10, 30)); + + tokens.should.contain(Tokens.Keywords.Modifiers.Public(12, 5)); + tokens.should.contain(Tokens.Keywords.Modifiers.Static(12, 15)); + tokens.should.contain(Tokens.Keywords.Class(12, 24)); + 
tokens.should.contain(Tokens.Identifiers.ClassName("PublicStaticClass", 12, 30)); + + tokens.should.contain(Tokens.Keywords.Modifiers.Sealed(14, 15)); + tokens.should.contain(Tokens.Keywords.Class(14, 24)); + tokens.should.contain(Tokens.Identifiers.ClassName("DefaultSealedClass", 14, 30)); + + tokens.should.contain(Tokens.Keywords.Modifiers.Public(16, 5)); + tokens.should.contain(Tokens.Keywords.Modifiers.Sealed(16, 15)); + tokens.should.contain(Tokens.Keywords.Class(16, 24)); + tokens.should.contain(Tokens.Identifiers.ClassName("PublicSealedClass", 16, 30)); + + tokens.should.contain(Tokens.Keywords.Modifiers.Public(18, 5)); + tokens.should.contain(Tokens.Keywords.Modifiers.Abstract(18, 15)); + tokens.should.contain(Tokens.Keywords.Class(18, 24)); + tokens.should.contain(Tokens.Identifiers.ClassName("PublicAbstractClass", 18, 30)); + + tokens.should.contain(Tokens.Keywords.Modifiers.Abstract(20, 15)); + tokens.should.contain(Tokens.Keywords.Class(20, 24)); + tokens.should.contain(Tokens.Identifiers.ClassName("DefaultAbstractClass", 20, 30)); + + }); + + it("generics in identifier", () => { + + const input = ` +namespace TestNamespace +{ + class Dictionary { } +}`; + let tokens: Token[] = TokenizerUtil.tokenize2(input); + + tokens.should.contain(Tokens.Keywords.Class(4, 5)); + tokens.should.contain(Tokens.Identifiers.ClassName("Dictionary", 4, 11)); + }); + + it("inheritance", () => { + + const input = ` +namespace TestNamespace +{ + class PublicClass : IInterface, IInterfaceTwo { } + class PublicClass : Root.IInterface, Something.IInterfaceTwo { } + class PublicClass : Dictionary>, IMap> { } +}`; + let tokens: Token[] = TokenizerUtil.tokenize2(input); + + tokens.should.contain(Tokens.Keywords.Class(4, 5)); + tokens.should.contain(Tokens.Identifiers.ClassName("PublicClass", 4, 11)); + tokens.should.contain(Tokens.Type("IInterface", 4, 28)); + tokens.should.contain(Tokens.Type("IInterfaceTwo", 4, 43)); + + tokens.should.contain(Tokens.Keywords.Class(5, 5)); + 
tokens.should.contain(Tokens.Identifiers.ClassName("PublicClass", 5, 11)); + tokens.should.contain(Tokens.Type("Root", 5, 28)); + tokens.should.contain(Tokens.Puncuation.Accessor(5, 32)); + tokens.should.contain(Tokens.Type("IInterface", 5, 33)); + tokens.should.contain(Tokens.Puncuation.TypeParametersBegin(5, 43)); + tokens.should.contain(Tokens.Type("Something", 5, 44)); + tokens.should.contain(Tokens.Puncuation.Accessor(5, 53)); + tokens.should.contain(Tokens.Type("Nested", 5, 54)); + tokens.should.contain(Tokens.Puncuation.TypeParametersEnd(5, 60)); + tokens.should.contain(Tokens.Puncuation.Comma(5, 61)); + tokens.should.contain(Tokens.Type("Something", 5, 63)); + tokens.should.contain(Tokens.Puncuation.Accessor(5, 72)); + tokens.should.contain(Tokens.Type("IInterfaceTwo", 5, 73)); + + tokens.should.contain(Tokens.Type("Dictionary", 6, 28)); + tokens.should.contain(Tokens.Puncuation.TypeParametersBegin(6, 38)); + tokens.should.contain(Tokens.Type("T", 6, 39)) + tokens.should.contain(Tokens.Puncuation.Comma(6, 40)); + tokens.should.contain(Tokens.Type("Dictionary", 6, 42)); + tokens.should.contain(Tokens.Puncuation.TypeParametersBegin(6, 52)); + tokens.should.contain(Tokens.Type("string", 6, 53)); + tokens.should.contain(Tokens.Puncuation.Comma(6, 59)); + tokens.should.contain(Tokens.Type("string", 6, 61)); + tokens.should.contain(Tokens.Puncuation.TypeParametersEnd(6, 67)); + tokens.should.contain(Tokens.Puncuation.TypeParametersEnd(6, 68)); + tokens.should.contain(Tokens.Puncuation.Comma(6, 69)); + tokens.should.contain(Tokens.Type("IMap", 6, 71)); + tokens.should.contain(Tokens.Puncuation.TypeParametersBegin(6, 75)); + tokens.should.contain(Tokens.Type("T", 6, 76)) + tokens.should.contain(Tokens.Puncuation.Comma(6, 77)); + tokens.should.contain(Tokens.Type("Dictionary", 6, 79)); + tokens.should.contain(Tokens.Puncuation.TypeParametersBegin(6, 89)); + tokens.should.contain(Tokens.Type("string", 6, 90)); + tokens.should.contain(Tokens.Puncuation.Comma(6, 96)); 
+ tokens.should.contain(Tokens.Type("string", 6, 98)); + tokens.should.contain(Tokens.Puncuation.TypeParametersEnd(6, 104)); + tokens.should.contain(Tokens.Puncuation.TypeParametersEnd(6, 105)); + }); + + it("generic constraints", () => { + + const input = ` +namespace TestNamespace +{ + class PublicClass where T : ISomething { } + class PublicClass : Dictionary[]>, ISomething + where T : ICar, new() + where X : struct + { + } +}`; + let tokens: Token[] = TokenizerUtil.tokenize2(input); + + for (let t of tokens) { + console.log(t); + } + + tokens.should.contain(Tokens.Keywords.Class(4, 5)); + tokens.should.contain(Tokens.Identifiers.ClassName("PublicClass", 4, 11)); + tokens.should.contain(Tokens.Keywords.Where(4, 26)); + tokens.should.contain(Tokens.Type("T", 4, 32)); + tokens.should.contain(Tokens.Puncuation.Colon(4, 34)); + tokens.should.contain(Tokens.Type("ISomething", 4, 36)); + + tokens.should.contain(Tokens.Keywords.Class(5, 5)); + tokens.should.contain(Tokens.Identifiers.ClassName("PublicClass", 5, 11)); + tokens.should.contain(Tokens.Type("Dictionary", 5, 31)); + tokens.should.contain(Tokens.Puncuation.TypeParametersBegin(5, 41)); + tokens.should.contain(Tokens.Type("T", 5, 42)); + tokens.should.contain(Tokens.Puncuation.Comma(5, 43)); + tokens.should.contain(Tokens.Type("List", 5, 45)); + tokens.should.contain(Tokens.Puncuation.TypeParametersBegin(5, 49)); + tokens.should.contain(Tokens.Type("string", 5, 50)); + tokens.should.contain(Tokens.Puncuation.TypeParametersEnd(5, 56)); + tokens.should.contain(Tokens.Puncuation.SquareBracket.Open(5, 57)); + tokens.should.contain(Tokens.Puncuation.SquareBracket.Close(5, 58)); + tokens.should.contain(Tokens.Puncuation.TypeParametersEnd(5, 59)); + tokens.should.contain(Tokens.Puncuation.Comma(5, 60)); + tokens.should.contain(Tokens.Type("ISomething", 5, 62)); + tokens.should.contain(Tokens.Keywords.Where(6, 9)); + tokens.should.contain(Tokens.Type("T", 6, 15)); + tokens.should.contain(Tokens.Puncuation.Colon(6, 
17)); + tokens.should.contain(Tokens.Type("ICar", 6, 19)); + tokens.should.contain(Tokens.Puncuation.Comma(6, 23)); + tokens.should.contain(Tokens.Keywords.New(6, 25)); + tokens.should.contain(Tokens.Puncuation.Parenthesis.Open(6, 28)); + tokens.should.contain(Tokens.Puncuation.Parenthesis.Close(6, 29)); + tokens.should.contain(Tokens.Keywords.Where(7, 9)); + tokens.should.contain(Tokens.Type("X", 7, 15)); + tokens.should.contain(Tokens.Puncuation.Colon(7, 17)); + tokens.should.contain(Tokens.Keywords.Struct(7, 19)); + tokens.should.contain(Tokens.Puncuation.CurlyBrace.Open(8, 5)); + tokens.should.contain(Tokens.Puncuation.CurlyBrace.Close(9, 5)); + }); + + it("nested class", () => { + + const input = ` +namespace TestNamespace +{ + class Klass + { + public class Nested + { + + } + } +}`; + let tokens: Token[] = TokenizerUtil.tokenize2(input); + + tokens.should.contain(Tokens.Keywords.Class(4, 5)); + tokens.should.contain(Tokens.Identifiers.ClassName("Klass", 4, 11)); + + tokens.should.contain(Tokens.Keywords.Modifiers.Public(6, 9)); + tokens.should.contain(Tokens.Keywords.Class(6, 16)); + tokens.should.contain(Tokens.Identifiers.ClassName("Nested", 6, 22)); + }); + }); +}); + + diff --git a/test/syntaxes/namespaces.test.syntax.ts b/test/syntaxes/namespaces.test.syntax.ts index d606c7e231..c6b7e13110 100644 --- a/test/syntaxes/namespaces.test.syntax.ts +++ b/test/syntaxes/namespaces.test.syntax.ts @@ -71,11 +71,11 @@ namespace TestNamespace }`; let tokens: Token[] = TokenizerUtil.tokenize2(input); - tokens.should.contain(Tokens.UsingKeyword("using", 2, 1)); + tokens.should.contain(Tokens.Keywords.Using(2, 1)); tokens.should.contain(Tokens.Identifiers.NamespaceName("UsingOne", 2, 7)); tokens.should.contain(Tokens.Puncuation.Semicolon(2, 15)); - tokens.should.contain(Tokens.UsingKeyword("using", 3, 1)); + tokens.should.contain(Tokens.Keywords.Using(3, 1)); tokens.should.contain(Tokens.Identifiers.AliasName("one", 3, 7)); 
tokens.should.contain(Tokens.Operators.Assignment(3, 11)); tokens.should.contain(Tokens.Type("UsingOne", 3, 13)); @@ -86,11 +86,11 @@ namespace TestNamespace tokens.should.contain(Tokens.Keywords.Namespace(5, 1)); tokens.should.contain(Tokens.Identifiers.NamespaceName("TestNamespace", 5, 11)); - tokens.should.contain(Tokens.UsingKeyword("using", 7, 5)); + tokens.should.contain(Tokens.Keywords.Using(7, 5)); tokens.should.contain(Tokens.Identifiers.NamespaceName("UsingTwo", 7, 11)); tokens.should.contain(Tokens.Puncuation.Semicolon(7, 19)); - tokens.should.contain(Tokens.UsingKeyword("using", 8, 5)); + tokens.should.contain(Tokens.Keywords.Using(8, 5)); tokens.should.contain(Tokens.Identifiers.AliasName("two", 8, 11)); tokens.should.contain(Tokens.Operators.Assignment(8, 15)); tokens.should.contain(Tokens.Type("UsingTwo", 8, 17)); @@ -101,11 +101,11 @@ namespace TestNamespace tokens.should.contain(Tokens.Keywords.Namespace(10, 5)); tokens.should.contain(Tokens.Identifiers.NamespaceName("NestedNamespace", 10, 15)); - tokens.should.contain(Tokens.UsingKeyword("using", 12, 9)); + tokens.should.contain(Tokens.Keywords.Using(12, 9)); tokens.should.contain(Tokens.Identifiers.NamespaceName("UsingThree", 12, 15)); tokens.should.contain(Tokens.Puncuation.Semicolon(12, 25)); - tokens.should.contain(Tokens.UsingKeyword("using", 13, 9)); + tokens.should.contain(Tokens.Keywords.Using(13, 9)); tokens.should.contain(Tokens.Identifiers.AliasName("three", 13, 15)); tokens.should.contain(Tokens.Operators.Assignment(13, 21)); tokens.should.contain(Tokens.Type("UsingThree", 13, 23)); diff --git a/test/syntaxes/utils/tokenizer.ts b/test/syntaxes/utils/tokenizer.ts index 7689aeda81..4a097acdfa 100644 --- a/test/syntaxes/utils/tokenizer.ts +++ b/test/syntaxes/utils/tokenizer.ts @@ -78,35 +78,79 @@ export namespace Tokens { } } + export namespace Identifiers { + export const AliasName = (text: string, line?: number, column?: number) => + createToken(text, "entity.name.type.alias.cs", line, 
column); + + export const ClassName = (text: string, line?: number, column?: number) => + createToken(text, "entity.name.type.class.cs", line, column); + + export const NamespaceName = (text: string, line?: number, column?: number) => + createToken(text, "entity.name.type.namespace.cs", line, column); + + export const PropertyName = (text: string, line?: number, column?: number) => + createToken(text, "entity.name.function.cs", line, column); + } + export namespace Keywords { + export namespace Modifiers { + export const Abstract = (line?: number, column?: number) => + createToken("abstract", "storage.modifier.cs", line, column); + + export const Internal = (line?: number, column?: number) => + createToken("internal", "storage.modifier.cs", line, column); + + export const New = (line?: number, column?: number) => + createToken("new", "storage.modifier.cs", line, column); + + export const Partial = (line?: number, column?: number) => + createToken("partial", "storage.modifier.cs", line, column); + + export const Private = (line?: number, column?: number) => + createToken("private", "storage.modifier.cs", line, column); + + export const Protected = (line?: number, column?: number) => + createToken("protected", "storage.modifier.cs", line, column); + + export const Public = (line?: number, column?: number) => + createToken("public", "storage.modifier.cs", line, column); + + export const Sealed = (line?: number, column?: number) => + createToken("sealed", "storage.modifier.cs", line, column); + + export const Static = (line?: number, column?: number) => + createToken("static", "storage.modifier.cs", line, column); + } + export const Alias = (line?: number, column?: number) => createToken("alias", "keyword.other.alias.cs", line, column); export const AttributeSpecifier = (text: string, line?: number, column?: number) => createToken(text, "keyword.other.attribute-specifier.cs", line, column); + export const Class = (line?: number, column?: number) => + 
createToken("class", "keyword.other.class.cs", line, column); + export const Extern = (line?: number, column?: number) => createToken("extern", "keyword.other.extern.cs", line, column); export const Namespace = (line?: number, column?: number) => createToken("namespace", "keyword.other.namespace.cs", line, column); + export const New = (line?: number, column?: number) => + createToken("new", "keyword.other.new.cs", line, column); + export const Static = (line?: number, column?: number) => createToken("static", "keyword.other.static.cs", line, column); + export const Struct = (line?: number, column?: number) => + createToken("struct", "keyword.other.struct.cs", line, column); + export const Using = (line?: number, column?: number) => createToken("using", "keyword.other.using.cs", line, column); - } - - export namespace Identifiers { - export const AliasName = (text: string, line?: number, column?: number) => - createToken(text, "entity.name.type.alias.cs", line, column); - export const NamespaceName = (text: string, line?: number, column?: number) => - createToken(text, "entity.name.type.namespace.cs", line, column); - - export const PropertyName = (text: string, line?: number, column?: number) => - createToken(text, "entity.name.function.cs", line, column); + export const Where = (line?: number, column?: number) => + createToken("where", "keyword.other.where.cs", line, column); } export namespace Literals { @@ -183,7 +227,7 @@ export namespace Tokens { createToken('"', "punctuation.definition.string.begin.cs", line, column); export const End = (line?: number, column?: number) => - createToken('"', "punctuation.definition.string.end.cs", line, column); + createToken('"', "punctuation.definition.string.end.cs", line, column); } export const TypeParametersBegin = (line?: number, column?: number) => @@ -198,15 +242,6 @@ export namespace Tokens { createToken(text, "variable.other.alias.cs", line, column); } - export const UsingKeyword = (text: string, line?: number, 
column?: number) => - createToken(text, "keyword.other.using.cs", line, column); - - export const ClassKeyword = (text: string, line?: number, column?: number) => - createToken(text, "storage.modifier.cs", line, column); - - export const ClassIdentifier = (text: string, line?: number, column?: number) => - createToken(text, "storage.type.cs", line, column); - export const StorageModifierKeyword = (text: string, line?: number, column?: number) => createToken(text, "storage.modifier.cs", line, column); From 6b258ee5053df2ae3c269bfa7d2476e151b44ce5 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Mon, 26 Dec 2016 17:41:11 -0800 Subject: [PATCH 015/192] Add interface declarations --- syntaxes/csharp2.json | 47 ++++++++++++- test/syntaxes/classes.test.syntax.ts | 4 -- test/syntaxes/interfaces.test.syntax.ts | 90 +++++++++++++++++++++++++ test/syntaxes/utils/tokenizer.ts | 6 ++ 4 files changed, 142 insertions(+), 5 deletions(-) create mode 100644 test/syntaxes/interfaces.test.syntax.ts diff --git a/syntaxes/csharp2.json b/syntaxes/csharp2.json index 276890403f..9b0e03781a 100644 --- a/syntaxes/csharp2.json +++ b/syntaxes/csharp2.json @@ -271,6 +271,15 @@ } ] }, + { + "begin": "(?=interface)", + "end": "(?=\\{)", + "patterns": [ + { + "include": "#interface-header" + } + ] + }, { "begin": "\\{", "beginCaptures": { @@ -313,7 +322,43 @@ "include": "#generic-constraints" }, { - "begin": "(?=:)", + "begin": ":", + "beginCaptures": { + "0": { + "name": "punctuation.separator.colon.cs" + } + }, + "end": "(?=\\{|where)", + "patterns": [ + { + "include": "#type" + }, + { + "include": "#punctuation-comma" + } + ] + } + ] + }, + "interface-header": { + "patterns": [ + { + "comment": "C# grammar: interface identifier variant-type-parameter-list[opt]", + "match": "(interface)\\s+([_$[:alpha:]][_$[:alnum:]]*(\\s*<\\s*(?:((in|out)\\s+)?[_$[:alpha:]][_$[:alnum:]]*\\s*,\\s*)*(?:((in|out)\\s+)?[_$[:alpha:]][_$[:alnum:]]*)\\s*>)?)", + "captures": { + "1": { + "name": 
"keyword.other.interface.cs" + }, + "2": { + "name": "entity.name.type.interface.cs" + } + } + }, + { + "include": "#generic-constraints" + }, + { + "begin": ":", "beginCaptures": { "0": { "name": "punctuation.separator.colon.cs" diff --git a/test/syntaxes/classes.test.syntax.ts b/test/syntaxes/classes.test.syntax.ts index 26eacae551..103762ee7a 100644 --- a/test/syntaxes/classes.test.syntax.ts +++ b/test/syntaxes/classes.test.syntax.ts @@ -156,10 +156,6 @@ namespace TestNamespace }`; let tokens: Token[] = TokenizerUtil.tokenize2(input); - for (let t of tokens) { - console.log(t); - } - tokens.should.contain(Tokens.Keywords.Class(4, 5)); tokens.should.contain(Tokens.Identifiers.ClassName("PublicClass", 4, 11)); tokens.should.contain(Tokens.Keywords.Where(4, 26)); diff --git a/test/syntaxes/interfaces.test.syntax.ts b/test/syntaxes/interfaces.test.syntax.ts new file mode 100644 index 0000000000..fdde39b2f5 --- /dev/null +++ b/test/syntaxes/interfaces.test.syntax.ts @@ -0,0 +1,90 @@ +import { should } from 'chai'; +import { Tokens, Token } from './utils/tokenizer'; +import { TokenizerUtil } from './utils/tokenizerUtil'; + +describe("Grammar", () => { + before(() => { + should(); + }); + + describe("Interfaces", () => { + it("simple interface", () => { + + const input = ` +interface IFoo { } +`; + + let tokens: Token[] = TokenizerUtil.tokenize2(input); + + tokens.should.contain(Tokens.Keywords.Interface(2, 1)); + tokens.should.contain(Tokens.Identifiers.InterfaceName("IFoo", 2, 11)); + tokens.should.contain(Tokens.Puncuation.CurlyBrace.Open(2, 16)); + tokens.should.contain(Tokens.Puncuation.CurlyBrace.Close(2, 18)); + }); + + it("interface inheritance", () => { + + const input = ` +interface IFoo { } +interface IBar : IFoo { } +`; + + let tokens: Token[] = TokenizerUtil.tokenize2(input); + + tokens.should.contain(Tokens.Keywords.Interface(3, 1)); + tokens.should.contain(Tokens.Identifiers.InterfaceName("IBar", 3, 11)); + 
tokens.should.contain(Tokens.Puncuation.Colon(3, 16)); + tokens.should.contain(Tokens.Type("IFoo", 3, 18)); + tokens.should.contain(Tokens.Puncuation.CurlyBrace.Open(3, 23)); + tokens.should.contain(Tokens.Puncuation.CurlyBrace.Close(3, 25)); + }); + + it("generic interface", () => { + + const input = ` +interface IFoo { } +`; + + let tokens: Token[] = TokenizerUtil.tokenize2(input); + + tokens.should.contain(Tokens.Keywords.Interface(2, 1)); + tokens.should.contain(Tokens.Identifiers.InterfaceName("IFoo", 2, 11)); + tokens.should.contain(Tokens.Puncuation.CurlyBrace.Open(2, 24)); + tokens.should.contain(Tokens.Puncuation.CurlyBrace.Close(2, 26)); + }); + + it("generic interface with variance", () => { + + const input = ` +interface IFoo { } +`; + + let tokens: Token[] = TokenizerUtil.tokenize2(input); + + tokens.should.contain(Tokens.Keywords.Interface(2, 1)); + tokens.should.contain(Tokens.Identifiers.InterfaceName("IFoo", 2, 11)); + tokens.should.contain(Tokens.Puncuation.CurlyBrace.Open(2, 31)); + tokens.should.contain(Tokens.Puncuation.CurlyBrace.Close(2, 33)); + }); + + it("generic interface with constraints", () => { + + const input = ` +interface IFoo where T1 : T2 { } +`; + + let tokens: Token[] = TokenizerUtil.tokenize2(input); + + tokens.should.contain(Tokens.Keywords.Interface(2, 1)); + tokens.should.contain(Tokens.Identifiers.InterfaceName("IFoo", 2, 11)); + tokens.should.contain(Tokens.Keywords.Where(2, 24)); + tokens.should.contain(Tokens.Type("T1", 2, 30)); + tokens.should.contain(Tokens.Puncuation.Colon(2, 33)); + tokens.should.contain(Tokens.Type("T2", 2, 35)); + tokens.should.contain(Tokens.Puncuation.CurlyBrace.Open(2, 38)); + tokens.should.contain(Tokens.Puncuation.CurlyBrace.Close(2, 40)); + }); + }); +}); + + diff --git a/test/syntaxes/utils/tokenizer.ts b/test/syntaxes/utils/tokenizer.ts index 4a097acdfa..920342b0f4 100644 --- a/test/syntaxes/utils/tokenizer.ts +++ b/test/syntaxes/utils/tokenizer.ts @@ -85,6 +85,9 @@ export namespace Tokens 
{ export const ClassName = (text: string, line?: number, column?: number) => createToken(text, "entity.name.type.class.cs", line, column); + export const InterfaceName = (text: string, line?: number, column?: number) => + createToken(text, "entity.name.type.interface.cs", line, column); + export const NamespaceName = (text: string, line?: number, column?: number) => createToken(text, "entity.name.type.namespace.cs", line, column); @@ -134,6 +137,9 @@ export namespace Tokens { export const Extern = (line?: number, column?: number) => createToken("extern", "keyword.other.extern.cs", line, column); + export const Interface = (line?: number, column?: number) => + createToken("interface", "keyword.other.interface.cs", line, column); + export const Namespace = (line?: number, column?: number) => createToken("namespace", "keyword.other.namespace.cs", line, column); From 30bcf5645a99b74149125bceb99701d9640c13a2 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Mon, 26 Dec 2016 18:47:48 -0800 Subject: [PATCH 016/192] Refactor test infrastructure a bit and improve test performance by 2X --- test/syntaxes/attributes.test.syntax.ts | 225 +++++------ test/syntaxes/boolean-literals.test.syntax.ts | 25 +- test/syntaxes/classes.test.syntax.ts | 353 +++++++++++------- test/syntaxes/comments.test.syntax.ts | 32 +- test/syntaxes/event.test.syntax.ts | 11 +- test/syntaxes/extern-aliases.test.syntax.ts | 26 +- test/syntaxes/field.test.syntax.ts | 19 +- test/syntaxes/interfaces.test.syntax.ts | 86 +++-- test/syntaxes/namespaces.test.syntax.ts | 144 ++++--- test/syntaxes/numeric-literals.test.syntax.ts | 9 +- test/syntaxes/property.test.syntax.ts | 21 +- test/syntaxes/string.test.syntax.ts | 19 +- test/syntaxes/using-directives.test.syntax.ts | 236 ++++++------ test/syntaxes/utils/tokenizer.ts | 155 ++++---- test/syntaxes/utils/tokenizerUtil.ts | 5 + 15 files changed, 787 insertions(+), 579 deletions(-) diff --git a/test/syntaxes/attributes.test.syntax.ts 
b/test/syntaxes/attributes.test.syntax.ts index e97100ed12..cf5df10d5e 100644 --- a/test/syntaxes/attributes.test.syntax.ts +++ b/test/syntaxes/attributes.test.syntax.ts @@ -1,5 +1,10 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. + *--------------------------------------------------------------------------------------------*/ + import { should } from 'chai'; -import { Tokens, Token } from './utils/tokenizer'; +import { Tokens } from './utils/tokenizer'; import { TokenizerUtil } from './utils/tokenizerUtil'; describe("Grammar", () => { @@ -11,11 +16,12 @@ describe("Grammar", () => { const input = ` [Foo]`; - let tokens: Token[] = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize2(input); - tokens.should.contain(Tokens.Puncuation.SquareBracket.Open(2, 1)); - tokens.should.contain(Tokens.Type("Foo", 2, 2)); - tokens.should.contain(Tokens.Puncuation.SquareBracket.Close(2, 5)); + tokens.should.deep.equal([ + Tokens.Puncuation.SquareBracket.Open(2, 1), + Tokens.Type("Foo", 2, 2), + Tokens.Puncuation.SquareBracket.Close(2, 5)]); }); it("global attribute with specifier", () => { @@ -23,13 +29,14 @@ describe("Grammar", () => { const input = ` [assembly: Foo]`; - let tokens: Token[] = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize2(input); - tokens.should.contain(Tokens.Puncuation.SquareBracket.Open(2, 1)); - tokens.should.contain(Tokens.Keywords.AttributeSpecifier("assembly", 2, 2)); - tokens.should.contain(Tokens.Puncuation.Colon(2, 10)); - tokens.should.contain(Tokens.Type("Foo", 2, 12)); - tokens.should.contain(Tokens.Puncuation.SquareBracket.Close(2, 15)); + tokens.should.deep.equal([ + Tokens.Puncuation.SquareBracket.Open(2, 1), + Tokens.Keywords.AttributeSpecifier("assembly", 2, 2), + Tokens.Puncuation.Colon(2, 10), 
+ Tokens.Type("Foo", 2, 12), + Tokens.Puncuation.SquareBracket.Close(2, 15)]); }); it("Two global attributes in same section with specifier", () => { @@ -37,15 +44,16 @@ describe("Grammar", () => { const input = ` [module: Foo, Bar]`; - let tokens: Token[] = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize2(input); - tokens.should.contain(Tokens.Puncuation.SquareBracket.Open(2, 1)); - tokens.should.contain(Tokens.Keywords.AttributeSpecifier("module", 2, 2)); - tokens.should.contain(Tokens.Puncuation.Colon(2, 8)); - tokens.should.contain(Tokens.Type("Foo", 2, 10)); - tokens.should.contain(Tokens.Puncuation.Comma(2, 13)); - tokens.should.contain(Tokens.Type("Bar", 2, 15)); - tokens.should.contain(Tokens.Puncuation.SquareBracket.Close(2, 18)); + tokens.should.deep.equal([ + Tokens.Puncuation.SquareBracket.Open(2, 1), + Tokens.Keywords.AttributeSpecifier("module", 2, 2), + Tokens.Puncuation.Colon(2, 8), + Tokens.Type("Foo", 2, 10), + Tokens.Puncuation.Comma(2, 13), + Tokens.Type("Bar", 2, 15), + Tokens.Puncuation.SquareBracket.Close(2, 18)]); }); it("Two global attributes in same section with specifier and empty argument lists", () => { @@ -53,19 +61,20 @@ describe("Grammar", () => { const input = ` [module: Foo(), Bar()]`; - let tokens: Token[] = TokenizerUtil.tokenize2(input); - - tokens.should.contain(Tokens.Puncuation.SquareBracket.Open(2, 1)); - tokens.should.contain(Tokens.Keywords.AttributeSpecifier("module", 2, 2)); - tokens.should.contain(Tokens.Puncuation.Colon(2, 8)); - tokens.should.contain(Tokens.Type("Foo", 2, 10)); - tokens.should.contain(Tokens.Puncuation.Parenthesis.Open(2, 13)); - tokens.should.contain(Tokens.Puncuation.Parenthesis.Close(2, 14)); - tokens.should.contain(Tokens.Puncuation.Comma(2, 15)); - tokens.should.contain(Tokens.Type("Bar", 2, 17)); - tokens.should.contain(Tokens.Puncuation.Parenthesis.Open(2, 20)); - tokens.should.contain(Tokens.Puncuation.Parenthesis.Close(2, 21)); - 
tokens.should.contain(Tokens.Puncuation.SquareBracket.Close(2, 22)); + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Puncuation.SquareBracket.Open(2, 1), + Tokens.Keywords.AttributeSpecifier("module", 2, 2), + Tokens.Puncuation.Colon(2, 8), + Tokens.Type("Foo", 2, 10), + Tokens.Puncuation.Parenthesis.Open(2, 13), + Tokens.Puncuation.Parenthesis.Close(2, 14), + Tokens.Puncuation.Comma(2, 15), + Tokens.Type("Bar", 2, 17), + Tokens.Puncuation.Parenthesis.Open(2, 20), + Tokens.Puncuation.Parenthesis.Close(2, 21), + Tokens.Puncuation.SquareBracket.Close(2, 22)]); }); it("Global attribute with one argument", () => { @@ -73,14 +82,15 @@ describe("Grammar", () => { const input = ` [Foo(true)]`; - let tokens: Token[] = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize2(input); - tokens.should.contain(Tokens.Puncuation.SquareBracket.Open(2, 1)); - tokens.should.contain(Tokens.Type("Foo", 2, 2)); - tokens.should.contain(Tokens.Puncuation.Parenthesis.Open(2, 5)); - tokens.should.contain(Tokens.Literals.Boolean.True(2, 6)); - tokens.should.contain(Tokens.Puncuation.Parenthesis.Close(2, 10)); - tokens.should.contain(Tokens.Puncuation.SquareBracket.Close(2, 11)); + tokens.should.deep.equal([ + Tokens.Puncuation.SquareBracket.Open(2, 1), + Tokens.Type("Foo", 2, 2), + Tokens.Puncuation.Parenthesis.Open(2, 5), + Tokens.Literals.Boolean.True(2, 6), + Tokens.Puncuation.Parenthesis.Close(2, 10), + Tokens.Puncuation.SquareBracket.Close(2, 11)]); }); it("Global attribute with two arguments", () => { @@ -88,16 +98,17 @@ describe("Grammar", () => { const input = ` [Foo(true, 42)]`; - let tokens: Token[] = TokenizerUtil.tokenize2(input); - - tokens.should.contain(Tokens.Puncuation.SquareBracket.Open(2, 1)); - tokens.should.contain(Tokens.Type("Foo", 2, 2)); - tokens.should.contain(Tokens.Puncuation.Parenthesis.Open(2, 5)); - tokens.should.contain(Tokens.Literals.Boolean.True(2, 6)); - tokens.should.contain(Tokens.Puncuation.Comma(2, 
10)); - tokens.should.contain(Tokens.Literals.Numeric.Decimal("42", 2, 12)); - tokens.should.contain(Tokens.Puncuation.Parenthesis.Close(2, 14)); - tokens.should.contain(Tokens.Puncuation.SquareBracket.Close(2, 15)); + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Puncuation.SquareBracket.Open(2, 1), + Tokens.Type("Foo", 2, 2), + Tokens.Puncuation.Parenthesis.Open(2, 5), + Tokens.Literals.Boolean.True(2, 6), + Tokens.Puncuation.Comma(2, 10), + Tokens.Literals.Numeric.Decimal("42", 2, 12), + Tokens.Puncuation.Parenthesis.Close(2, 14), + Tokens.Puncuation.SquareBracket.Close(2, 15)]); }); it("Global attribute with three arguments", () => { @@ -105,20 +116,21 @@ describe("Grammar", () => { const input = ` [Foo(true, 42, "text")]`; - let tokens: Token[] = TokenizerUtil.tokenize2(input); - - tokens.should.contain(Tokens.Puncuation.SquareBracket.Open(2, 1)); - tokens.should.contain(Tokens.Type("Foo", 2, 2)); - tokens.should.contain(Tokens.Puncuation.Parenthesis.Open(2, 5)); - tokens.should.contain(Tokens.Literals.Boolean.True(2, 6)); - tokens.should.contain(Tokens.Puncuation.Comma(2, 10)); - tokens.should.contain(Tokens.Literals.Numeric.Decimal("42", 2, 12)); - tokens.should.contain(Tokens.Puncuation.Comma(2, 14)); - tokens.should.contain(Tokens.Puncuation.String.Begin(2, 16)); - tokens.should.contain(Tokens.Literals.String("text", 2, 17)); - tokens.should.contain(Tokens.Puncuation.String.End(2, 21)); - tokens.should.contain(Tokens.Puncuation.Parenthesis.Close(2, 22)); - tokens.should.contain(Tokens.Puncuation.SquareBracket.Close(2, 23)); + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Puncuation.SquareBracket.Open(2, 1), + Tokens.Type("Foo", 2, 2), + Tokens.Puncuation.Parenthesis.Open(2, 5), + Tokens.Literals.Boolean.True(2, 6), + Tokens.Puncuation.Comma(2, 10), + Tokens.Literals.Numeric.Decimal("42", 2, 12), + Tokens.Puncuation.Comma(2, 14), + Tokens.Puncuation.String.Begin(2, 16), + 
Tokens.Literals.String("text", 2, 17), + Tokens.Puncuation.String.End(2, 21), + Tokens.Puncuation.Parenthesis.Close(2, 22), + Tokens.Puncuation.SquareBracket.Close(2, 23)]); }); it("Global attribute with named argument", () => { @@ -126,16 +138,17 @@ describe("Grammar", () => { const input = ` [Foo(Bar = 42)]`; - let tokens: Token[] = TokenizerUtil.tokenize2(input); - - tokens.should.contain(Tokens.Puncuation.SquareBracket.Open(2, 1)); - tokens.should.contain(Tokens.Type("Foo", 2, 2)); - tokens.should.contain(Tokens.Puncuation.Parenthesis.Open(2, 5)); - tokens.should.contain(Tokens.Identifiers.PropertyName("Bar", 2, 6)); - tokens.should.contain(Tokens.Operators.Assignment(2, 10)); - tokens.should.contain(Tokens.Literals.Numeric.Decimal("42", 2, 12)); - tokens.should.contain(Tokens.Puncuation.Parenthesis.Close(2, 14)); - tokens.should.contain(Tokens.Puncuation.SquareBracket.Close(2, 15)); + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Puncuation.SquareBracket.Open(2, 1), + Tokens.Type("Foo", 2, 2), + Tokens.Puncuation.Parenthesis.Open(2, 5), + Tokens.Identifiers.PropertyName("Bar", 2, 6), + Tokens.Operators.Assignment(2, 10), + Tokens.Literals.Numeric.Decimal("42", 2, 12), + Tokens.Puncuation.Parenthesis.Close(2, 14), + Tokens.Puncuation.SquareBracket.Close(2, 15)]); }); it("Global attribute with one positional argument and one named argument", () => { @@ -143,18 +156,19 @@ describe("Grammar", () => { const input = ` [Foo(true, Bar = 42)]`; - let tokens: Token[] = TokenizerUtil.tokenize2(input); - - tokens.should.contain(Tokens.Puncuation.SquareBracket.Open(2, 1)); - tokens.should.contain(Tokens.Type("Foo", 2, 2)); - tokens.should.contain(Tokens.Puncuation.Parenthesis.Open(2, 5)); - tokens.should.contain(Tokens.Literals.Boolean.True(2, 6)); - tokens.should.contain(Tokens.Puncuation.Comma(2, 10)); - tokens.should.contain(Tokens.Identifiers.PropertyName("Bar", 2, 12)); - tokens.should.contain(Tokens.Operators.Assignment(2, 16)); 
- tokens.should.contain(Tokens.Literals.Numeric.Decimal("42", 2, 18)); - tokens.should.contain(Tokens.Puncuation.Parenthesis.Close(2, 20)); - tokens.should.contain(Tokens.Puncuation.SquareBracket.Close(2, 21)); + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Puncuation.SquareBracket.Open(2, 1), + Tokens.Type("Foo", 2, 2), + Tokens.Puncuation.Parenthesis.Open(2, 5), + Tokens.Literals.Boolean.True(2, 6), + Tokens.Puncuation.Comma(2, 10), + Tokens.Identifiers.PropertyName("Bar", 2, 12), + Tokens.Operators.Assignment(2, 16), + Tokens.Literals.Numeric.Decimal("42", 2, 18), + Tokens.Puncuation.Parenthesis.Close(2, 20), + Tokens.Puncuation.SquareBracket.Close(2, 21)]); }); it("Global attribute with specifier, one positional argument, and two named arguments", () => { @@ -162,26 +176,27 @@ describe("Grammar", () => { const input = ` [module: Foo(true, Bar = 42, Baz = "hello")]`; - let tokens: Token[] = TokenizerUtil.tokenize2(input); - - tokens.should.contain(Tokens.Puncuation.SquareBracket.Open(2, 1)); - tokens.should.contain(Tokens.Keywords.AttributeSpecifier("module", 2, 2)); - tokens.should.contain(Tokens.Puncuation.Colon(2, 8)); - tokens.should.contain(Tokens.Type("Foo", 2, 10)); - tokens.should.contain(Tokens.Puncuation.Parenthesis.Open(2, 13)); - tokens.should.contain(Tokens.Literals.Boolean.True(2, 14)); - tokens.should.contain(Tokens.Puncuation.Comma(2, 18)); - tokens.should.contain(Tokens.Identifiers.PropertyName("Bar", 2, 20)); - tokens.should.contain(Tokens.Operators.Assignment(2, 24)); - tokens.should.contain(Tokens.Literals.Numeric.Decimal("42", 2, 26)); - tokens.should.contain(Tokens.Puncuation.Comma(2, 28)); - tokens.should.contain(Tokens.Identifiers.PropertyName("Baz", 2, 30)); - tokens.should.contain(Tokens.Operators.Assignment(2, 34)); - tokens.should.contain(Tokens.Puncuation.String.Begin(2, 36)); - tokens.should.contain(Tokens.Literals.String("hello", 2, 37)); - 
tokens.should.contain(Tokens.Puncuation.String.End(2, 42)); - tokens.should.contain(Tokens.Puncuation.Parenthesis.Close(2, 43)); - tokens.should.contain(Tokens.Puncuation.SquareBracket.Close(2, 44)); + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Puncuation.SquareBracket.Open(2, 1), + Tokens.Keywords.AttributeSpecifier("module", 2, 2), + Tokens.Puncuation.Colon(2, 8), + Tokens.Type("Foo", 2, 10), + Tokens.Puncuation.Parenthesis.Open(2, 13), + Tokens.Literals.Boolean.True(2, 14), + Tokens.Puncuation.Comma(2, 18), + Tokens.Identifiers.PropertyName("Bar", 2, 20), + Tokens.Operators.Assignment(2, 24), + Tokens.Literals.Numeric.Decimal("42", 2, 26), + Tokens.Puncuation.Comma(2, 28), + Tokens.Identifiers.PropertyName("Baz", 2, 30), + Tokens.Operators.Assignment(2, 34), + Tokens.Puncuation.String.Begin(2, 36), + Tokens.Literals.String("hello", 2, 37), + Tokens.Puncuation.String.End(2, 42), + Tokens.Puncuation.Parenthesis.Close(2, 43), + Tokens.Puncuation.SquareBracket.Close(2, 44)]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/boolean-literals.test.syntax.ts b/test/syntaxes/boolean-literals.test.syntax.ts index 8ad04a0c56..f8e8474e92 100644 --- a/test/syntaxes/boolean-literals.test.syntax.ts +++ b/test/syntaxes/boolean-literals.test.syntax.ts @@ -1,5 +1,10 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. 
+ *--------------------------------------------------------------------------------------------*/ + import { should } from 'chai'; -import { Tokens, Token } from './utils/tokenizer'; +import { Tokens } from './utils/tokenizer'; import { TokenizerUtil } from './utils/tokenizerUtil'; describe("Grammar", () => { @@ -15,9 +20,14 @@ class C { } }`; - let tokens: Token[] = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize2(input); - tokens.should.contain(Tokens.Literals.Boolean.True(4, 17)); + tokens.should.deep.equal([ + Tokens.Keywords.Class(2, 1), + Tokens.Identifiers.ClassName("C", 2, 7), + Tokens.Puncuation.CurlyBrace.Open(2, 9), + Tokens.Literals.Boolean.True(4, 17), + Tokens.Puncuation.CurlyBrace.Close(6, 1)]); }); it("false", () => { @@ -29,9 +39,14 @@ class C { } }`; - let tokens: Token[] = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize2(input); - tokens.should.contain(Tokens.Literals.Boolean.False(4, 17)); + tokens.should.deep.equal([ + Tokens.Keywords.Class(2, 1), + Tokens.Identifiers.ClassName("C", 2, 7), + Tokens.Puncuation.CurlyBrace.Open(2, 9), + Tokens.Literals.Boolean.False(4, 17), + Tokens.Puncuation.CurlyBrace.Close(6, 1)]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/classes.test.syntax.ts b/test/syntaxes/classes.test.syntax.ts index 103762ee7a..da7383bd5b 100644 --- a/test/syntaxes/classes.test.syntax.ts +++ b/test/syntaxes/classes.test.syntax.ts @@ -1,11 +1,14 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. 
+ *--------------------------------------------------------------------------------------------*/ + import { should } from 'chai'; -import { Tokens, Token } from './utils/tokenizer'; +import { Tokens } from './utils/tokenizer'; import { TokenizerUtil } from './utils/tokenizerUtil'; describe("Grammar", () => { - before(() => { - should(); - }); + before(() => should()); describe("Class", () => { it("class keyword and storage modifiers", () => { @@ -31,46 +34,70 @@ namespace TestNamespace abstract class DefaultAbstractClass { } }`; - let tokens: Token[] = TokenizerUtil.tokenize2(input); - - tokens.should.contain(Tokens.Keywords.Modifiers.Public(4, 5)); - tokens.should.contain(Tokens.Keywords.Class(4, 24)); - tokens.should.contain(Tokens.Identifiers.ClassName("PublicClass", 4, 30)); - - tokens.should.contain(Tokens.Keywords.Class(6, 24)); - tokens.should.contain(Tokens.Identifiers.ClassName("DefaultClass", 6, 30)); - - tokens.should.contain(Tokens.Keywords.Modifiers.Internal(8, 5)); - tokens.should.contain(Tokens.Keywords.Class(8, 24)); - tokens.should.contain(Tokens.Identifiers.ClassName("InternalClass", 8, 30)); - - tokens.should.contain(Tokens.Keywords.Modifiers.Static(10, 15)); - tokens.should.contain(Tokens.Keywords.Class(10, 24)); - tokens.should.contain(Tokens.Identifiers.ClassName("DefaultStaticClass", 10, 30)); - - tokens.should.contain(Tokens.Keywords.Modifiers.Public(12, 5)); - tokens.should.contain(Tokens.Keywords.Modifiers.Static(12, 15)); - tokens.should.contain(Tokens.Keywords.Class(12, 24)); - tokens.should.contain(Tokens.Identifiers.ClassName("PublicStaticClass", 12, 30)); - - tokens.should.contain(Tokens.Keywords.Modifiers.Sealed(14, 15)); - tokens.should.contain(Tokens.Keywords.Class(14, 24)); - tokens.should.contain(Tokens.Identifiers.ClassName("DefaultSealedClass", 14, 30)); - - tokens.should.contain(Tokens.Keywords.Modifiers.Public(16, 5)); - tokens.should.contain(Tokens.Keywords.Modifiers.Sealed(16, 15)); - 
tokens.should.contain(Tokens.Keywords.Class(16, 24)); - tokens.should.contain(Tokens.Identifiers.ClassName("PublicSealedClass", 16, 30)); - - tokens.should.contain(Tokens.Keywords.Modifiers.Public(18, 5)); - tokens.should.contain(Tokens.Keywords.Modifiers.Abstract(18, 15)); - tokens.should.contain(Tokens.Keywords.Class(18, 24)); - tokens.should.contain(Tokens.Identifiers.ClassName("PublicAbstractClass", 18, 30)); - - tokens.should.contain(Tokens.Keywords.Modifiers.Abstract(20, 15)); - tokens.should.contain(Tokens.Keywords.Class(20, 24)); - tokens.should.contain(Tokens.Identifiers.ClassName("DefaultAbstractClass", 20, 30)); - + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Namespace(2, 1), + Tokens.Identifiers.NamespaceName("TestNamespace", 2, 11), + Tokens.Puncuation.CurlyBrace.Open(3, 1), + + Tokens.Keywords.Modifiers.Public(4, 5), + Tokens.Keywords.Class(4, 24), + Tokens.Identifiers.ClassName("PublicClass", 4, 30), + Tokens.Puncuation.CurlyBrace.Open(4, 42), + Tokens.Puncuation.CurlyBrace.Close(4, 44), + + Tokens.Keywords.Class(6, 24), + Tokens.Identifiers.ClassName("DefaultClass", 6, 30), + Tokens.Puncuation.CurlyBrace.Open(6, 43), + Tokens.Puncuation.CurlyBrace.Close(6, 45), + + Tokens.Keywords.Modifiers.Internal(8, 5), + Tokens.Keywords.Class(8, 24), + Tokens.Identifiers.ClassName("InternalClass", 8, 30), + Tokens.Puncuation.CurlyBrace.Open(8, 44), + Tokens.Puncuation.CurlyBrace.Close(8, 46), + + Tokens.Keywords.Modifiers.Static(10, 15), + Tokens.Keywords.Class(10, 24), + Tokens.Identifiers.ClassName("DefaultStaticClass", 10, 30), + Tokens.Puncuation.CurlyBrace.Open(10, 49), + Tokens.Puncuation.CurlyBrace.Close(10, 51), + + Tokens.Keywords.Modifiers.Public(12, 5), + Tokens.Keywords.Modifiers.Static(12, 15), + Tokens.Keywords.Class(12, 24), + Tokens.Identifiers.ClassName("PublicStaticClass", 12, 30), + Tokens.Puncuation.CurlyBrace.Open(12, 48), + Tokens.Puncuation.CurlyBrace.Close(12, 50), + + 
Tokens.Keywords.Modifiers.Sealed(14, 15), + Tokens.Keywords.Class(14, 24), + Tokens.Identifiers.ClassName("DefaultSealedClass", 14, 30), + Tokens.Puncuation.CurlyBrace.Open(14, 49), + Tokens.Puncuation.CurlyBrace.Close(14, 51), + + Tokens.Keywords.Modifiers.Public(16, 5), + Tokens.Keywords.Modifiers.Sealed(16, 15), + Tokens.Keywords.Class(16, 24), + Tokens.Identifiers.ClassName("PublicSealedClass", 16, 30), + Tokens.Puncuation.CurlyBrace.Open(16, 48), + Tokens.Puncuation.CurlyBrace.Close(16, 50), + + Tokens.Keywords.Modifiers.Public(18, 5), + Tokens.Keywords.Modifiers.Abstract(18, 15), + Tokens.Keywords.Class(18, 24), + Tokens.Identifiers.ClassName("PublicAbstractClass", 18, 30), + Tokens.Puncuation.CurlyBrace.Open(18, 50), + Tokens.Puncuation.CurlyBrace.Close(18, 52), + + Tokens.Keywords.Modifiers.Abstract(20, 15), + Tokens.Keywords.Class(20, 24), + Tokens.Identifiers.ClassName("DefaultAbstractClass", 20, 30), + Tokens.Puncuation.CurlyBrace.Open(20, 51), + Tokens.Puncuation.CurlyBrace.Close(20, 53), + + Tokens.Puncuation.CurlyBrace.Close(21, 1)]); }); it("generics in identifier", () => { @@ -80,10 +107,19 @@ namespace TestNamespace { class Dictionary { } }`; - let tokens: Token[] = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize2(input); - tokens.should.contain(Tokens.Keywords.Class(4, 5)); - tokens.should.contain(Tokens.Identifiers.ClassName("Dictionary", 4, 11)); + tokens.should.deep.equal([ + Tokens.Keywords.Namespace(2, 1), + Tokens.Identifiers.NamespaceName("TestNamespace", 2, 11), + Tokens.Puncuation.CurlyBrace.Open(3, 1), + + Tokens.Keywords.Class(4, 5), + Tokens.Identifiers.ClassName("Dictionary", 4, 11), + Tokens.Puncuation.CurlyBrace.Open(4, 36), + Tokens.Puncuation.CurlyBrace.Close(4, 38), + + Tokens.Puncuation.CurlyBrace.Close(5, 1)]); }); it("inheritance", () => { @@ -95,51 +131,70 @@ namespace TestNamespace class PublicClass : Root.IInterface, Something.IInterfaceTwo { } class PublicClass : Dictionary>, IMap> { } }`; - let 
tokens: Token[] = TokenizerUtil.tokenize2(input); - - tokens.should.contain(Tokens.Keywords.Class(4, 5)); - tokens.should.contain(Tokens.Identifiers.ClassName("PublicClass", 4, 11)); - tokens.should.contain(Tokens.Type("IInterface", 4, 28)); - tokens.should.contain(Tokens.Type("IInterfaceTwo", 4, 43)); - - tokens.should.contain(Tokens.Keywords.Class(5, 5)); - tokens.should.contain(Tokens.Identifiers.ClassName("PublicClass", 5, 11)); - tokens.should.contain(Tokens.Type("Root", 5, 28)); - tokens.should.contain(Tokens.Puncuation.Accessor(5, 32)); - tokens.should.contain(Tokens.Type("IInterface", 5, 33)); - tokens.should.contain(Tokens.Puncuation.TypeParametersBegin(5, 43)); - tokens.should.contain(Tokens.Type("Something", 5, 44)); - tokens.should.contain(Tokens.Puncuation.Accessor(5, 53)); - tokens.should.contain(Tokens.Type("Nested", 5, 54)); - tokens.should.contain(Tokens.Puncuation.TypeParametersEnd(5, 60)); - tokens.should.contain(Tokens.Puncuation.Comma(5, 61)); - tokens.should.contain(Tokens.Type("Something", 5, 63)); - tokens.should.contain(Tokens.Puncuation.Accessor(5, 72)); - tokens.should.contain(Tokens.Type("IInterfaceTwo", 5, 73)); - - tokens.should.contain(Tokens.Type("Dictionary", 6, 28)); - tokens.should.contain(Tokens.Puncuation.TypeParametersBegin(6, 38)); - tokens.should.contain(Tokens.Type("T", 6, 39)) - tokens.should.contain(Tokens.Puncuation.Comma(6, 40)); - tokens.should.contain(Tokens.Type("Dictionary", 6, 42)); - tokens.should.contain(Tokens.Puncuation.TypeParametersBegin(6, 52)); - tokens.should.contain(Tokens.Type("string", 6, 53)); - tokens.should.contain(Tokens.Puncuation.Comma(6, 59)); - tokens.should.contain(Tokens.Type("string", 6, 61)); - tokens.should.contain(Tokens.Puncuation.TypeParametersEnd(6, 67)); - tokens.should.contain(Tokens.Puncuation.TypeParametersEnd(6, 68)); - tokens.should.contain(Tokens.Puncuation.Comma(6, 69)); - tokens.should.contain(Tokens.Type("IMap", 6, 71)); - 
tokens.should.contain(Tokens.Puncuation.TypeParametersBegin(6, 75)); - tokens.should.contain(Tokens.Type("T", 6, 76)) - tokens.should.contain(Tokens.Puncuation.Comma(6, 77)); - tokens.should.contain(Tokens.Type("Dictionary", 6, 79)); - tokens.should.contain(Tokens.Puncuation.TypeParametersBegin(6, 89)); - tokens.should.contain(Tokens.Type("string", 6, 90)); - tokens.should.contain(Tokens.Puncuation.Comma(6, 96)); - tokens.should.contain(Tokens.Type("string", 6, 98)); - tokens.should.contain(Tokens.Puncuation.TypeParametersEnd(6, 104)); - tokens.should.contain(Tokens.Puncuation.TypeParametersEnd(6, 105)); + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Namespace(2, 1), + Tokens.Identifiers.NamespaceName("TestNamespace", 2, 11), + Tokens.Puncuation.CurlyBrace.Open(3, 1), + + Tokens.Keywords.Class(4, 5), + Tokens.Identifiers.ClassName("PublicClass", 4, 11), + Tokens.Puncuation.Colon(4, 26), + Tokens.Type("IInterface", 4, 28), + Tokens.Puncuation.Comma(4, 38), + Tokens.Type("IInterfaceTwo", 4, 43), + Tokens.Puncuation.CurlyBrace.Open(4, 57), + Tokens.Puncuation.CurlyBrace.Close(4, 59), + + Tokens.Keywords.Class(5, 5), + Tokens.Identifiers.ClassName("PublicClass", 5, 11), + Tokens.Puncuation.Colon(5, 26), + Tokens.Type("Root", 5, 28), + Tokens.Puncuation.Accessor(5, 32), + Tokens.Type("IInterface", 5, 33), + Tokens.Puncuation.TypeParameters.Begin(5, 43), + Tokens.Type("Something", 5, 44), + Tokens.Puncuation.Accessor(5, 53), + Tokens.Type("Nested", 5, 54), + Tokens.Puncuation.TypeParameters.End(5, 60), + Tokens.Puncuation.Comma(5, 61), + Tokens.Type("Something", 5, 63), + Tokens.Puncuation.Accessor(5, 72), + Tokens.Type("IInterfaceTwo", 5, 73), + Tokens.Puncuation.CurlyBrace.Open(5, 87), + Tokens.Puncuation.CurlyBrace.Close(5, 89), + + Tokens.Keywords.Class(6, 5), + Tokens.Identifiers.ClassName("PublicClass", 6, 11), + Tokens.Puncuation.Colon(6, 26), + Tokens.Type("Dictionary", 6, 28), + 
Tokens.Puncuation.TypeParameters.Begin(6, 38), + Tokens.Type("T", 6, 39), + Tokens.Puncuation.Comma(6, 40), + Tokens.Type("Dictionary", 6, 42), + Tokens.Puncuation.TypeParameters.Begin(6, 52), + Tokens.Type("string", 6, 53), + Tokens.Puncuation.Comma(6, 59), + Tokens.Type("string", 6, 61), + Tokens.Puncuation.TypeParameters.End(6, 67), + Tokens.Puncuation.TypeParameters.End(6, 68), + Tokens.Puncuation.Comma(6, 69), + Tokens.Type("IMap", 6, 71), + Tokens.Puncuation.TypeParameters.Begin(6, 75), + Tokens.Type("T", 6, 76), + Tokens.Puncuation.Comma(6, 77), + Tokens.Type("Dictionary", 6, 79), + Tokens.Puncuation.TypeParameters.Begin(6, 89), + Tokens.Type("string", 6, 90), + Tokens.Puncuation.Comma(6, 96), + Tokens.Type("string", 6, 98), + Tokens.Puncuation.TypeParameters.End(6, 104), + Tokens.Puncuation.TypeParameters.End(6, 105), + Tokens.Puncuation.CurlyBrace.Open(6, 107), + Tokens.Puncuation.CurlyBrace.Close(6, 109), + + Tokens.Puncuation.CurlyBrace.Close(7, 1)]); }); it("generic constraints", () => { @@ -154,44 +209,54 @@ namespace TestNamespace { } }`; - let tokens: Token[] = TokenizerUtil.tokenize2(input); - - tokens.should.contain(Tokens.Keywords.Class(4, 5)); - tokens.should.contain(Tokens.Identifiers.ClassName("PublicClass", 4, 11)); - tokens.should.contain(Tokens.Keywords.Where(4, 26)); - tokens.should.contain(Tokens.Type("T", 4, 32)); - tokens.should.contain(Tokens.Puncuation.Colon(4, 34)); - tokens.should.contain(Tokens.Type("ISomething", 4, 36)); - - tokens.should.contain(Tokens.Keywords.Class(5, 5)); - tokens.should.contain(Tokens.Identifiers.ClassName("PublicClass", 5, 11)); - tokens.should.contain(Tokens.Type("Dictionary", 5, 31)); - tokens.should.contain(Tokens.Puncuation.TypeParametersBegin(5, 41)); - tokens.should.contain(Tokens.Type("T", 5, 42)); - tokens.should.contain(Tokens.Puncuation.Comma(5, 43)); - tokens.should.contain(Tokens.Type("List", 5, 45)); - tokens.should.contain(Tokens.Puncuation.TypeParametersBegin(5, 49)); - 
tokens.should.contain(Tokens.Type("string", 5, 50)); - tokens.should.contain(Tokens.Puncuation.TypeParametersEnd(5, 56)); - tokens.should.contain(Tokens.Puncuation.SquareBracket.Open(5, 57)); - tokens.should.contain(Tokens.Puncuation.SquareBracket.Close(5, 58)); - tokens.should.contain(Tokens.Puncuation.TypeParametersEnd(5, 59)); - tokens.should.contain(Tokens.Puncuation.Comma(5, 60)); - tokens.should.contain(Tokens.Type("ISomething", 5, 62)); - tokens.should.contain(Tokens.Keywords.Where(6, 9)); - tokens.should.contain(Tokens.Type("T", 6, 15)); - tokens.should.contain(Tokens.Puncuation.Colon(6, 17)); - tokens.should.contain(Tokens.Type("ICar", 6, 19)); - tokens.should.contain(Tokens.Puncuation.Comma(6, 23)); - tokens.should.contain(Tokens.Keywords.New(6, 25)); - tokens.should.contain(Tokens.Puncuation.Parenthesis.Open(6, 28)); - tokens.should.contain(Tokens.Puncuation.Parenthesis.Close(6, 29)); - tokens.should.contain(Tokens.Keywords.Where(7, 9)); - tokens.should.contain(Tokens.Type("X", 7, 15)); - tokens.should.contain(Tokens.Puncuation.Colon(7, 17)); - tokens.should.contain(Tokens.Keywords.Struct(7, 19)); - tokens.should.contain(Tokens.Puncuation.CurlyBrace.Open(8, 5)); - tokens.should.contain(Tokens.Puncuation.CurlyBrace.Close(9, 5)); + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Namespace(2, 1), + Tokens.Identifiers.NamespaceName("TestNamespace", 2, 11), + Tokens.Puncuation.CurlyBrace.Open(3, 1), + + Tokens.Keywords.Class(4, 5), + Tokens.Identifiers.ClassName("PublicClass", 4, 11), + Tokens.Keywords.Where(4, 26), + Tokens.Type("T", 4, 32), + Tokens.Puncuation.Colon(4, 34), + Tokens.Type("ISomething", 4, 36), + Tokens.Puncuation.CurlyBrace.Open(4, 47), + Tokens.Puncuation.CurlyBrace.Close(4, 49), + + Tokens.Keywords.Class(5, 5), + Tokens.Identifiers.ClassName("PublicClass", 5, 11), + Tokens.Puncuation.Colon(5, 29), + Tokens.Type("Dictionary", 5, 31), + Tokens.Puncuation.TypeParameters.Begin(5, 41), + 
Tokens.Type("T", 5, 42), + Tokens.Puncuation.Comma(5, 43), + Tokens.Type("List", 5, 45), + Tokens.Puncuation.TypeParameters.Begin(5, 49), + Tokens.Type("string", 5, 50), + Tokens.Puncuation.TypeParameters.End(5, 56), + Tokens.Puncuation.SquareBracket.Open(5, 57), + Tokens.Puncuation.SquareBracket.Close(5, 58), + Tokens.Puncuation.TypeParameters.End(5, 59), + Tokens.Puncuation.Comma(5, 60), + Tokens.Type("ISomething", 5, 62), + Tokens.Keywords.Where(6, 9), + Tokens.Type("T", 6, 15), + Tokens.Puncuation.Colon(6, 17), + Tokens.Type("ICar", 6, 19), + Tokens.Puncuation.Comma(6, 23), + Tokens.Keywords.New(6, 25), + Tokens.Puncuation.Parenthesis.Open(6, 28), + Tokens.Puncuation.Parenthesis.Close(6, 29), + Tokens.Keywords.Where(7, 9), + Tokens.Type("X", 7, 15), + Tokens.Puncuation.Colon(7, 17), + Tokens.Keywords.Struct(7, 19), + Tokens.Puncuation.CurlyBrace.Open(8, 5), + Tokens.Puncuation.CurlyBrace.Close(9, 5), + + Tokens.Puncuation.CurlyBrace.Close(10, 1)]); }); it("nested class", () => { @@ -207,14 +272,26 @@ namespace TestNamespace } } }`; - let tokens: Token[] = TokenizerUtil.tokenize2(input); - - tokens.should.contain(Tokens.Keywords.Class(4, 5)); - tokens.should.contain(Tokens.Identifiers.ClassName("Klass", 4, 11)); - - tokens.should.contain(Tokens.Keywords.Modifiers.Public(6, 9)); - tokens.should.contain(Tokens.Keywords.Class(6, 16)); - tokens.should.contain(Tokens.Identifiers.ClassName("Nested", 6, 22)); + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Namespace(2, 1), + Tokens.Identifiers.NamespaceName("TestNamespace", 2, 11), + Tokens.Puncuation.CurlyBrace.Open(3, 1), + + Tokens.Keywords.Class(4, 5), + Tokens.Identifiers.ClassName("Klass", 4, 11), + Tokens.Puncuation.CurlyBrace.Open(5, 5), + + Tokens.Keywords.Modifiers.Public(6, 9), + Tokens.Keywords.Class(6, 16), + Tokens.Identifiers.ClassName("Nested", 6, 22), + Tokens.Puncuation.CurlyBrace.Open(7, 9), + Tokens.Puncuation.CurlyBrace.Close(9, 9), + + 
Tokens.Puncuation.CurlyBrace.Close(10, 5), + + Tokens.Puncuation.CurlyBrace.Close(11, 1)]); }); }); }); diff --git a/test/syntaxes/comments.test.syntax.ts b/test/syntaxes/comments.test.syntax.ts index d0df776e46..af87535d1e 100644 --- a/test/syntaxes/comments.test.syntax.ts +++ b/test/syntaxes/comments.test.syntax.ts @@ -1,5 +1,10 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. + *--------------------------------------------------------------------------------------------*/ + import { should } from 'chai'; -import { Tokens, Token } from './utils/tokenizer'; +import { Tokens } from './utils/tokenizer'; import { TokenizerUtil } from './utils/tokenizerUtil'; describe("Grammar", () => { @@ -11,10 +16,11 @@ describe("Grammar", () => { const input = ` // foo`; - let tokens: Token[] = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize2(input); - tokens.should.contain(Tokens.Comment.SingleLine.Start(2, 1)); - tokens.should.contain(Tokens.Comment.SingleLine.Text(" foo", 2, 3)); + tokens.should.deep.equal([ + Tokens.Comment.SingleLine.Start(2, 1), + Tokens.Comment.SingleLine.Text(" foo", 2, 3)]); }); it("single-line comment after whitespace", () => { @@ -22,11 +28,12 @@ describe("Grammar", () => { const input = ` // foo`; - let tokens: Token[] = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize2(input); - tokens.should.contain(Tokens.Comment.LeadingWhitespace(" ", 2, 1)); - tokens.should.contain(Tokens.Comment.SingleLine.Start(2, 5)); - tokens.should.contain(Tokens.Comment.SingleLine.Text(" foo", 2, 7)); + tokens.should.deep.equal([ + Tokens.Comment.LeadingWhitespace(" ", 2, 1), + Tokens.Comment.SingleLine.Start(2, 5), + Tokens.Comment.SingleLine.Text(" foo", 2, 7)]); }); it("multi-line comment", () => { @@ -34,11 +41,12 @@ 
describe("Grammar", () => { const input = ` /* foo */`; - let tokens: Token[] = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize2(input); - tokens.should.contain(Tokens.Comment.MultiLine.Start(2, 1)); - tokens.should.contain(Tokens.Comment.MultiLine.Text(" foo ", 2, 3)); - tokens.should.contain(Tokens.Comment.MultiLine.End(2, 8)); + tokens.should.deep.equal([ + Tokens.Comment.MultiLine.Start(2, 1), + Tokens.Comment.MultiLine.Text(" foo ", 2, 3), + Tokens.Comment.MultiLine.End(2, 8)]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/event.test.syntax.ts b/test/syntaxes/event.test.syntax.ts index 2d13aa042a..3e99f3b339 100644 --- a/test/syntaxes/event.test.syntax.ts +++ b/test/syntaxes/event.test.syntax.ts @@ -1,5 +1,10 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. 
+ *--------------------------------------------------------------------------------------------*/ + import { should } from 'chai'; -import { Tokens, Token } from './utils/tokenizer'; +import { Tokens } from './utils/tokenizer'; import { TokenizerUtil } from'./utils/tokenizerUtil'; describe("Grammar", function() { @@ -16,7 +21,7 @@ public class Tester public event Type Event; }`; - let tokens: Token[] = TokenizerUtil.tokenize(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.contain(Tokens.StorageModifierKeyword("public", 4, 5)); tokens.should.contain(Tokens.StorageModifierKeyword("event", 4, 12)); @@ -32,7 +37,7 @@ public class Tester public event EventHandler, Dictionary> Event; }`; - let tokens: Token[] = TokenizerUtil.tokenize(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.contain(Tokens.StorageModifierKeyword("public", 4, 5)); tokens.should.contain(Tokens.StorageModifierKeyword("event", 4, 12)); diff --git a/test/syntaxes/extern-aliases.test.syntax.ts b/test/syntaxes/extern-aliases.test.syntax.ts index 92fe723904..fa08a7229d 100644 --- a/test/syntaxes/extern-aliases.test.syntax.ts +++ b/test/syntaxes/extern-aliases.test.syntax.ts @@ -1,5 +1,10 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. 
+ *--------------------------------------------------------------------------------------------*/ + import { should } from 'chai'; -import { Tokens, Token } from './utils/tokenizer'; +import { Tokens } from './utils/tokenizer'; import { TokenizerUtil } from './utils/tokenizerUtil'; describe("Grammar", () => { @@ -12,16 +17,17 @@ describe("Grammar", () => { extern alias X; extern alias Y;`; - let tokens: Token[] = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize2(input); - tokens.should.contain(Tokens.Keywords.Extern(2, 1)); - tokens.should.contain(Tokens.Keywords.Alias(2, 8)); - tokens.should.contain(Tokens.Variables.Alias("X", 2, 14)); - tokens.should.contain(Tokens.Puncuation.Semicolon(2, 15)); - tokens.should.contain(Tokens.Keywords.Extern(3, 1)); - tokens.should.contain(Tokens.Keywords.Alias(3, 8)); - tokens.should.contain(Tokens.Variables.Alias("Y", 3, 14)); - tokens.should.contain(Tokens.Puncuation.Semicolon(3, 15)); + tokens.should.deep.equal([ + Tokens.Keywords.Extern(2, 1), + Tokens.Keywords.Alias(2, 8), + Tokens.Variables.Alias("X", 2, 14), + Tokens.Puncuation.Semicolon(2, 15), + Tokens.Keywords.Extern(3, 1), + Tokens.Keywords.Alias(3, 8), + Tokens.Variables.Alias("Y", 3, 14), + Tokens.Puncuation.Semicolon(3, 15)]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/field.test.syntax.ts b/test/syntaxes/field.test.syntax.ts index 1b72f5be66..dd00ef393b 100644 --- a/test/syntaxes/field.test.syntax.ts +++ b/test/syntaxes/field.test.syntax.ts @@ -1,5 +1,10 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. 
+ *--------------------------------------------------------------------------------------------*/ + import { should } from 'chai'; -import { Tokens, Token } from './utils/tokenizer'; +import { Tokens } from './utils/tokenizer'; import { TokenizerUtil } from'./utils/tokenizerUtil'; describe("Grammar", function() { @@ -18,7 +23,7 @@ public class Tester private List field123; }`; - let tokens: Token[] = TokenizerUtil.tokenize(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.contain(Tokens.StorageModifierKeyword("private", 4, 5)); tokens.should.contain(Tokens.Type("List", 4, 13)); @@ -36,7 +41,7 @@ public class Tester private Dictionary< List, Dictionary> _field; }`; - let tokens: Token[] = TokenizerUtil.tokenize(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.contain(Tokens.StorageModifierKeyword("private", 4, 5)); tokens.should.contain(Tokens.Type("Dictionary", 4, 13)); @@ -56,7 +61,7 @@ public class Tester string _field3; }`; - let tokens: Token[] = TokenizerUtil.tokenize(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.contain(Tokens.StorageModifierKeyword("private", 4, 5)); tokens.should.contain(Tokens.StorageModifierKeyword("static", 4, 13)); @@ -78,7 +83,7 @@ public class Tester string[] field123; }`; - let tokens: Token[] = TokenizerUtil.tokenize(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.contain(Tokens.Type("string", 4, 5)); tokens.should.contain(Tokens.FieldIdentifier("field123", 4, 12)); @@ -96,7 +101,7 @@ public class Tester const bool field = true; }`; - let tokens: Token[] = TokenizerUtil.tokenize(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.contain(Tokens.StorageModifierKeyword("private", 4, 5)); tokens.should.contain(Tokens.Type("string", 4, 13)); @@ -118,7 +123,7 @@ public class Tester const bool field => true; }`; - let tokens: Token[] = TokenizerUtil.tokenize(input); + let tokens = TokenizerUtil.tokenize(input); 
tokens.should.contain(Tokens.StorageModifierKeyword("private", 4, 5)); tokens.should.contain(Tokens.Type("string", 4, 13)); diff --git a/test/syntaxes/interfaces.test.syntax.ts b/test/syntaxes/interfaces.test.syntax.ts index fdde39b2f5..07bb9ca1ce 100644 --- a/test/syntaxes/interfaces.test.syntax.ts +++ b/test/syntaxes/interfaces.test.syntax.ts @@ -1,11 +1,14 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. + *--------------------------------------------------------------------------------------------*/ + import { should } from 'chai'; -import { Tokens, Token } from './utils/tokenizer'; +import { Tokens } from './utils/tokenizer'; import { TokenizerUtil } from './utils/tokenizerUtil'; describe("Grammar", () => { - before(() => { - should(); - }); + before(() => should()); describe("Interfaces", () => { it("simple interface", () => { @@ -14,12 +17,13 @@ describe("Grammar", () => { interface IFoo { } `; - let tokens: Token[] = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize2(input); - tokens.should.contain(Tokens.Keywords.Interface(2, 1)); - tokens.should.contain(Tokens.Identifiers.InterfaceName("IFoo", 2, 11)); - tokens.should.contain(Tokens.Puncuation.CurlyBrace.Open(2, 16)); - tokens.should.contain(Tokens.Puncuation.CurlyBrace.Close(2, 18)); + tokens.should.deep.equal([ + Tokens.Keywords.Interface(2, 1), + Tokens.Identifiers.InterfaceName("IFoo", 2, 11), + Tokens.Puncuation.CurlyBrace.Open(2, 16), + Tokens.Puncuation.CurlyBrace.Close(2, 18)]); }); it("interface inheritance", () => { @@ -29,14 +33,19 @@ interface IFoo { } interface IBar : IFoo { } `; - let tokens: Token[] = TokenizerUtil.tokenize2(input); - - tokens.should.contain(Tokens.Keywords.Interface(3, 1)); - tokens.should.contain(Tokens.Identifiers.InterfaceName("IBar", 3, 11)); 
- tokens.should.contain(Tokens.Puncuation.Colon(3, 16)); - tokens.should.contain(Tokens.Type("IFoo", 3, 18)); - tokens.should.contain(Tokens.Puncuation.CurlyBrace.Open(3, 23)); - tokens.should.contain(Tokens.Puncuation.CurlyBrace.Close(3, 25)); + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Interface(2, 1), + Tokens.Identifiers.InterfaceName("IFoo", 2, 11), + Tokens.Puncuation.CurlyBrace.Open(2, 16), + Tokens.Puncuation.CurlyBrace.Close(2, 18), + Tokens.Keywords.Interface(3, 1), + Tokens.Identifiers.InterfaceName("IBar", 3, 11), + Tokens.Puncuation.Colon(3, 16), + Tokens.Type("IFoo", 3, 18), + Tokens.Puncuation.CurlyBrace.Open(3, 23), + Tokens.Puncuation.CurlyBrace.Close(3, 25)]); }); it("generic interface", () => { @@ -45,12 +54,13 @@ interface IBar : IFoo { } interface IFoo { } `; - let tokens: Token[] = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize2(input); - tokens.should.contain(Tokens.Keywords.Interface(2, 1)); - tokens.should.contain(Tokens.Identifiers.InterfaceName("IFoo", 2, 11)); - tokens.should.contain(Tokens.Puncuation.CurlyBrace.Open(2, 24)); - tokens.should.contain(Tokens.Puncuation.CurlyBrace.Close(2, 26)); + tokens.should.deep.equal([ + Tokens.Keywords.Interface(2, 1), + Tokens.Identifiers.InterfaceName("IFoo", 2, 11), + Tokens.Puncuation.CurlyBrace.Open(2, 24), + Tokens.Puncuation.CurlyBrace.Close(2, 26)]); }); it("generic interface with variance", () => { @@ -59,12 +69,13 @@ interface IFoo { } interface IFoo { } `; - let tokens: Token[] = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize2(input); - tokens.should.contain(Tokens.Keywords.Interface(2, 1)); - tokens.should.contain(Tokens.Identifiers.InterfaceName("IFoo", 2, 11)); - tokens.should.contain(Tokens.Puncuation.CurlyBrace.Open(2, 31)); - tokens.should.contain(Tokens.Puncuation.CurlyBrace.Close(2, 33)); + tokens.should.deep.equal([ + Tokens.Keywords.Interface(2, 1), + 
Tokens.Identifiers.InterfaceName("IFoo", 2, 11), + Tokens.Puncuation.CurlyBrace.Open(2, 31), + Tokens.Puncuation.CurlyBrace.Close(2, 33)]); }); it("generic interface with constraints", () => { @@ -73,16 +84,17 @@ interface IFoo { } interface IFoo where T1 : T2 { } `; - let tokens: Token[] = TokenizerUtil.tokenize2(input); - - tokens.should.contain(Tokens.Keywords.Interface(2, 1)); - tokens.should.contain(Tokens.Identifiers.InterfaceName("IFoo", 2, 11)); - tokens.should.contain(Tokens.Keywords.Where(2, 24)); - tokens.should.contain(Tokens.Type("T1", 2, 30)); - tokens.should.contain(Tokens.Puncuation.Colon(2, 33)); - tokens.should.contain(Tokens.Type("T2", 2, 35)); - tokens.should.contain(Tokens.Puncuation.CurlyBrace.Open(2, 38)); - tokens.should.contain(Tokens.Puncuation.CurlyBrace.Close(2, 40)); + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Interface(2, 1), + Tokens.Identifiers.InterfaceName("IFoo", 2, 11), + Tokens.Keywords.Where(2, 24), + Tokens.Type("T1", 2, 30), + Tokens.Puncuation.Colon(2, 33), + Tokens.Type("T2", 2, 35), + Tokens.Puncuation.CurlyBrace.Open(2, 38), + Tokens.Puncuation.CurlyBrace.Close(2, 40)]); }); }); }); diff --git a/test/syntaxes/namespaces.test.syntax.ts b/test/syntaxes/namespaces.test.syntax.ts index c6b7e13110..cf4e46d6ba 100644 --- a/test/syntaxes/namespaces.test.syntax.ts +++ b/test/syntaxes/namespaces.test.syntax.ts @@ -1,11 +1,14 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. 
+ *--------------------------------------------------------------------------------------------*/ + import { should } from 'chai'; -import { Tokens, Token } from './utils/tokenizer'; +import { Tokens } from './utils/tokenizer'; import { TokenizerUtil } from './utils/tokenizerUtil'; describe("Grammar", () => { - before(() => { - should(); - }); + before(() => should()); describe("Namespace", () => { it("has a namespace keyword and a name", () => { @@ -14,10 +17,13 @@ describe("Grammar", () => { namespace TestNamespace { }`; - let tokens: Token[] = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize2(input); - tokens.should.contain(Tokens.Keywords.Namespace(2, 1)); - tokens.should.contain(Tokens.Identifiers.NamespaceName("TestNamespace", 2, 11)); + tokens.should.deep.equal([ + Tokens.Keywords.Namespace(2, 1), + Tokens.Identifiers.NamespaceName("TestNamespace", 2, 11), + Tokens.Puncuation.CurlyBrace.Open(3, 1), + Tokens.Puncuation.CurlyBrace.Close(4, 1)]); }); it("has a namespace keyword and a dotted name", () => { @@ -26,12 +32,15 @@ namespace TestNamespace namespace Test.Namespace { }`; - let tokens: Token[] = TokenizerUtil.tokenize2(input); - - tokens.should.contain(Tokens.Keywords.Namespace(2, 1)); - tokens.should.contain(Tokens.Identifiers.NamespaceName("Test", 2, 11)); - tokens.should.contain(Tokens.Puncuation.Accessor(2, 15)); - tokens.should.contain(Tokens.Identifiers.NamespaceName("Namespace", 2, 16)); + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Namespace(2, 1), + Tokens.Identifiers.NamespaceName("Test", 2, 11), + Tokens.Puncuation.Accessor(2, 15), + Tokens.Identifiers.NamespaceName("Namespace", 2, 16), + Tokens.Puncuation.CurlyBrace.Open(3, 1), + Tokens.Puncuation.CurlyBrace.Close(4, 1)]); }); it("can be nested", () => { @@ -43,13 +52,19 @@ namespace TestNamespace } }`; - let tokens: Token[] = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize2(input); - 
tokens.should.contain(Tokens.Keywords.Namespace(2, 1)); - tokens.should.contain(Tokens.Identifiers.NamespaceName("TestNamespace", 2, 11)); + tokens.should.deep.equal([ + Tokens.Keywords.Namespace(2, 1), + Tokens.Identifiers.NamespaceName("TestNamespace", 2, 11), + Tokens.Puncuation.CurlyBrace.Open(3, 1), - tokens.should.contain(Tokens.Keywords.Namespace(4, 5)); - tokens.should.contain(Tokens.Identifiers.NamespaceName("NestedNamespace", 4, 15)); + Tokens.Keywords.Namespace(4, 5), + Tokens.Identifiers.NamespaceName("NestedNamespace", 4, 15), + Tokens.Puncuation.CurlyBrace.Open(4, 31), + + Tokens.Puncuation.CurlyBrace.Close(6, 5), + Tokens.Puncuation.CurlyBrace.Close(7, 1)]); }); it("can contain using statements", () => { @@ -69,50 +84,55 @@ namespace TestNamespace using three = UsingThree.Something; } }`; - let tokens: Token[] = TokenizerUtil.tokenize2(input); - - tokens.should.contain(Tokens.Keywords.Using(2, 1)); - tokens.should.contain(Tokens.Identifiers.NamespaceName("UsingOne", 2, 7)); - tokens.should.contain(Tokens.Puncuation.Semicolon(2, 15)); - - tokens.should.contain(Tokens.Keywords.Using(3, 1)); - tokens.should.contain(Tokens.Identifiers.AliasName("one", 3, 7)); - tokens.should.contain(Tokens.Operators.Assignment(3, 11)); - tokens.should.contain(Tokens.Type("UsingOne", 3, 13)); - tokens.should.contain(Tokens.Puncuation.Accessor(3, 21)); - tokens.should.contain(Tokens.Type("Something", 3, 22)); - tokens.should.contain(Tokens.Puncuation.Semicolon(3, 31)); - - tokens.should.contain(Tokens.Keywords.Namespace(5, 1)); - tokens.should.contain(Tokens.Identifiers.NamespaceName("TestNamespace", 5, 11)); - - tokens.should.contain(Tokens.Keywords.Using(7, 5)); - tokens.should.contain(Tokens.Identifiers.NamespaceName("UsingTwo", 7, 11)); - tokens.should.contain(Tokens.Puncuation.Semicolon(7, 19)); - - tokens.should.contain(Tokens.Keywords.Using(8, 5)); - tokens.should.contain(Tokens.Identifiers.AliasName("two", 8, 11)); - 
tokens.should.contain(Tokens.Operators.Assignment(8, 15)); - tokens.should.contain(Tokens.Type("UsingTwo", 8, 17)); - tokens.should.contain(Tokens.Puncuation.Accessor(8, 25)); - tokens.should.contain(Tokens.Type("Something", 8, 26)); - tokens.should.contain(Tokens.Puncuation.Semicolon(8, 35)); - - tokens.should.contain(Tokens.Keywords.Namespace(10, 5)); - tokens.should.contain(Tokens.Identifiers.NamespaceName("NestedNamespace", 10, 15)); - - tokens.should.contain(Tokens.Keywords.Using(12, 9)); - tokens.should.contain(Tokens.Identifiers.NamespaceName("UsingThree", 12, 15)); - tokens.should.contain(Tokens.Puncuation.Semicolon(12, 25)); - - tokens.should.contain(Tokens.Keywords.Using(13, 9)); - tokens.should.contain(Tokens.Identifiers.AliasName("three", 13, 15)); - tokens.should.contain(Tokens.Operators.Assignment(13, 21)); - tokens.should.contain(Tokens.Type("UsingThree", 13, 23)); - tokens.should.contain(Tokens.Puncuation.Accessor(13, 33)); - tokens.should.contain(Tokens.Type("Something", 13, 34)); - tokens.should.contain(Tokens.Puncuation.Semicolon(13, 43)); - + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Using(2, 1), + Tokens.Identifiers.NamespaceName("UsingOne", 2, 7), + Tokens.Puncuation.Semicolon(2, 15), + + Tokens.Keywords.Using(3, 1), + Tokens.Identifiers.AliasName("one", 3, 7), + Tokens.Operators.Assignment(3, 11), + Tokens.Type("UsingOne", 3, 13), + Tokens.Puncuation.Accessor(3, 21), + Tokens.Type("Something", 3, 22), + Tokens.Puncuation.Semicolon(3, 31), + + Tokens.Keywords.Namespace(5, 1), + Tokens.Identifiers.NamespaceName("TestNamespace", 5, 11), + Tokens.Puncuation.CurlyBrace.Open(6, 1), + + Tokens.Keywords.Using(7, 5), + Tokens.Identifiers.NamespaceName("UsingTwo", 7, 11), + Tokens.Puncuation.Semicolon(7, 19), + + Tokens.Keywords.Using(8, 5), + Tokens.Identifiers.AliasName("two", 8, 11), + Tokens.Operators.Assignment(8, 15), + Tokens.Type("UsingTwo", 8, 17), + Tokens.Puncuation.Accessor(8, 25), + 
Tokens.Type("Something", 8, 26), + Tokens.Puncuation.Semicolon(8, 35), + + Tokens.Keywords.Namespace(10, 5), + Tokens.Identifiers.NamespaceName("NestedNamespace", 10, 15), + Tokens.Puncuation.CurlyBrace.Open(11, 5), + + Tokens.Keywords.Using(12, 9), + Tokens.Identifiers.NamespaceName("UsingThree", 12, 15), + Tokens.Puncuation.Semicolon(12, 25), + + Tokens.Keywords.Using(13, 9), + Tokens.Identifiers.AliasName("three", 13, 15), + Tokens.Operators.Assignment(13, 21), + Tokens.Type("UsingThree", 13, 23), + Tokens.Puncuation.Accessor(13, 33), + Tokens.Type("Something", 13, 34), + Tokens.Puncuation.Semicolon(13, 43), + + Tokens.Puncuation.CurlyBrace.Close(14, 5), + Tokens.Puncuation.CurlyBrace.Close(15, 1)]); }); }); }); diff --git a/test/syntaxes/numeric-literals.test.syntax.ts b/test/syntaxes/numeric-literals.test.syntax.ts index 71d26ef7e5..d7b129e3de 100644 --- a/test/syntaxes/numeric-literals.test.syntax.ts +++ b/test/syntaxes/numeric-literals.test.syntax.ts @@ -1,5 +1,10 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. 
+ *--------------------------------------------------------------------------------------------*/ + import { should } from 'chai'; -import { Tokens, Token } from './utils/tokenizer'; +import { Tokens } from './utils/tokenizer'; import { TokenizerUtil } from './utils/tokenizerUtil'; describe("Grammar", () => { @@ -15,7 +20,7 @@ class C { } }`; - let tokens: Token[] = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize2(input); tokens.should.contain(Tokens.Puncuation.SquareBracket.Open(2, 1)); tokens.should.contain(Tokens.Type("Foo", 2, 2)); diff --git a/test/syntaxes/property.test.syntax.ts b/test/syntaxes/property.test.syntax.ts index 1bc549f66d..788ee7f709 100644 --- a/test/syntaxes/property.test.syntax.ts +++ b/test/syntaxes/property.test.syntax.ts @@ -1,5 +1,10 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. 
+ *--------------------------------------------------------------------------------------------*/ + import { should } from 'chai'; -import { Tokens, Token } from './utils/tokenizer'; +import { Tokens } from './utils/tokenizer'; import { TokenizerUtil } from'./utils/tokenizerUtil'; describe("Grammar", function() { @@ -19,7 +24,7 @@ class Tester set { something = value; } } }`; - let tokens: Token[] = TokenizerUtil.tokenize(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.contain(Tokens.StorageModifierKeyword("public", 4, 5)); tokens.should.contain(Tokens.Type("IBooom", 4, 12)); @@ -35,7 +40,7 @@ class Tester { public IBooom Property { get { return null; } private set { something = value; } } }`; - let tokens: Token[] = TokenizerUtil.tokenize(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.contain(Tokens.StorageModifierKeyword("public", 4, 5)); tokens.should.contain(Tokens.Type("IBooom", 4, 12)); @@ -53,7 +58,7 @@ class Tester { IBooom Property {get; set;} }`; - let tokens: Token[] = TokenizerUtil.tokenize(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.contain(Tokens.Type("IBooom", 4, 5)); tokens.should.contain(Tokens.PropertyIdentifier("Property", 4, 12)); @@ -66,7 +71,7 @@ class Tester { public IBooom Property { get; set; } }`; - let tokens: Token[] = TokenizerUtil.tokenize(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.contain(Tokens.StorageModifierKeyword("public", 4, 5)); tokens.should.contain(Tokens.Type("IBooom", 4, 12)); @@ -86,7 +91,7 @@ class Tester set; } }`; - let tokens: Token[] = TokenizerUtil.tokenize(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.contain(Tokens.StorageModifierKeyword("public", 4, 5)); tokens.should.contain(Tokens.Type("IBooom", 4, 12)); @@ -102,7 +107,7 @@ class Tester { public Dictionary[]> Property { get; set; } }`; - let tokens: Token[] = TokenizerUtil.tokenize(input); + let tokens = TokenizerUtil.tokenize(input); 
tokens.should.contain(Tokens.StorageModifierKeyword("public", 4, 5)); tokens.should.contain(Tokens.Type("Dictionary", 4, 12)); @@ -121,7 +126,7 @@ class Tester public Dictionary[]> Property { get; } = new Dictionary[]>(); }`; - let tokens: Token[] = TokenizerUtil.tokenize(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.contain(Tokens.StorageModifierKeyword("public", 4, 5)); tokens.should.contain(Tokens.Type("Dictionary", 4, 12)); diff --git a/test/syntaxes/string.test.syntax.ts b/test/syntaxes/string.test.syntax.ts index c7b9873d1e..b9e32abded 100644 --- a/test/syntaxes/string.test.syntax.ts +++ b/test/syntaxes/string.test.syntax.ts @@ -1,5 +1,10 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. + *--------------------------------------------------------------------------------------------*/ + import { should } from 'chai'; -import { Tokens, Token } from './utils/tokenizer'; +import { Tokens } from './utils/tokenizer'; import { TokenizerUtil } from'./utils/tokenizerUtil'; describe("Grammar", function() { @@ -16,7 +21,7 @@ public class Tester string test = $"hello {one} world {two}!"; }`; - let tokens: Token[] = TokenizerUtil.tokenize(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.contain(Tokens.StringStart('$"', 4, 19)); tokens.should.contain(Tokens.StringDoubleQuoted("hello ", 4, 21)); @@ -36,7 +41,7 @@ public class Tester string test = $"hello world!"; }`; - let tokens: Token[] = TokenizerUtil.tokenize(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.contain(Tokens.StringStart('$"', 4, 19)); tokens.should.contain(Tokens.StringDoubleQuoted("hello world!", 4, 21)); @@ -52,7 +57,7 @@ public class Tester world!"; }`; - let tokens: Token[] = TokenizerUtil.tokenize(input); + let tokens = 
TokenizerUtil.tokenize(input); tokens.should.contain(Tokens.StringStart('$"', 4, 19)); tokens.should.contain(Tokens.StringDoubleQuoted("hello", 4, 21)); @@ -69,7 +74,7 @@ public class Tester string test = $@"hello {one} world {two}!"; }`; - let tokens: Token[] = TokenizerUtil.tokenize(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.contain(Tokens.StringStart('$@"', 4, 19)); tokens.should.contain(Tokens.StringDoubleQuotedVerbatim("hello ", 4, 22)); @@ -90,7 +95,7 @@ public class Tester world {two}!"; }`; - let tokens: Token[] = TokenizerUtil.tokenize(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.contain(Tokens.StringStart('$@"', 4, 19)); tokens.should.contain(Tokens.StringDoubleQuotedVerbatim("hello ", 4, 22)); @@ -110,7 +115,7 @@ public class Tester world!"; }`; - let tokens: Token[] = TokenizerUtil.tokenize(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.contain(Tokens.StringStart('$@"', 4, 19)); tokens.should.contain(Tokens.StringDoubleQuotedVerbatim("hello", 4, 22)); diff --git a/test/syntaxes/using-directives.test.syntax.ts b/test/syntaxes/using-directives.test.syntax.ts index cb2a5b8e80..9f71534686 100644 --- a/test/syntaxes/using-directives.test.syntax.ts +++ b/test/syntaxes/using-directives.test.syntax.ts @@ -1,5 +1,10 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. 
+ *--------------------------------------------------------------------------------------------*/ + import { should } from 'chai'; -import { Tokens, Token } from './utils/tokenizer'; +import { Tokens } from './utils/tokenizer'; import { TokenizerUtil } from './utils/tokenizerUtil'; describe("Grammar", () => { @@ -11,11 +16,12 @@ describe("Grammar", () => { const input = ` using System;`; - let tokens: Token[] = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize2(input); - tokens.should.contain(Tokens.Keywords.Using(2, 1)); - tokens.should.contain(Tokens.Identifiers.NamespaceName("System", 2, 7)); - tokens.should.contain(Tokens.Puncuation.Semicolon(2, 13)); + tokens.should.deep.equal([ + Tokens.Keywords.Using(2, 1), + Tokens.Identifiers.NamespaceName("System", 2, 7), + Tokens.Puncuation.Semicolon(2, 13)]); }); it("using static type", () => { @@ -23,14 +29,15 @@ using System;`; const input = ` using static System.Console;`; - let tokens: Token[] = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize2(input); - tokens.should.contain(Tokens.Keywords.Using(2, 1)); - tokens.should.contain(Tokens.Keywords.Static(2, 7)); - tokens.should.contain(Tokens.Type("System", 2, 14)); - tokens.should.contain(Tokens.Puncuation.Accessor(2, 20)); - tokens.should.contain(Tokens.Type("Console", 2, 21)); - tokens.should.contain(Tokens.Puncuation.Semicolon(2, 28)); + tokens.should.deep.equal([ + Tokens.Keywords.Using(2, 1), + Tokens.Keywords.Static(2, 7), + Tokens.Type("System", 2, 14), + Tokens.Puncuation.Accessor(2, 20), + Tokens.Type("Console", 2, 21), + Tokens.Puncuation.Semicolon(2, 28)]); }); it("namespace alias", () => { @@ -38,13 +45,14 @@ using static System.Console;`; const input = ` using S = System;`; - let tokens: Token[] = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize2(input); - tokens.should.contain(Tokens.Keywords.Using(2, 1)); - tokens.should.contain(Tokens.Identifiers.AliasName("S", 2, 7)); - 
tokens.should.contain(Tokens.Operators.Assignment(2, 9)); - tokens.should.contain(Tokens.Type("System", 2, 11)); - tokens.should.contain(Tokens.Puncuation.Semicolon(2, 17)); + tokens.should.deep.equal([ + Tokens.Keywords.Using(2, 1), + Tokens.Identifiers.AliasName("S", 2, 7), + Tokens.Operators.Assignment(2, 9), + Tokens.Type("System", 2, 11), + Tokens.Puncuation.Semicolon(2, 17)]); }); it("type alias", () => { @@ -52,15 +60,16 @@ using S = System;`; const input = ` using C = System.Console;`; - let tokens: Token[] = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize2(input); - tokens.should.contain(Tokens.Keywords.Using(2, 1)); - tokens.should.contain(Tokens.Identifiers.AliasName("C", 2, 7)); - tokens.should.contain(Tokens.Operators.Assignment(2, 9)); - tokens.should.contain(Tokens.Type("System", 2, 11)); - tokens.should.contain(Tokens.Puncuation.Accessor(2, 17)); - tokens.should.contain(Tokens.Type("Console", 2, 18)); - tokens.should.contain(Tokens.Puncuation.Semicolon(2, 25)); + tokens.should.deep.equal([ + Tokens.Keywords.Using(2, 1), + Tokens.Identifiers.AliasName("C", 2, 7), + Tokens.Operators.Assignment(2, 9), + Tokens.Type("System", 2, 11), + Tokens.Puncuation.Accessor(2, 17), + Tokens.Type("Console", 2, 18), + Tokens.Puncuation.Semicolon(2, 25)]); }); it("type alias with generic type", () => { @@ -68,24 +77,25 @@ using C = System.Console;`; const input = ` using IntList = System.Collections.Generic.List;`; - let tokens: Token[] = TokenizerUtil.tokenize2(input); - - tokens.should.contain(Tokens.Keywords.Using(2, 1)); - tokens.should.contain(Tokens.Identifiers.AliasName("IntList", 2, 7)); - tokens.should.contain(Tokens.Operators.Assignment(2, 15)); - tokens.should.contain(Tokens.Type("System", 2, 17)); - tokens.should.contain(Tokens.Puncuation.Accessor(2, 23)); - tokens.should.contain(Tokens.Type("Collections", 2, 24)); - tokens.should.contain(Tokens.Puncuation.Accessor(2, 35)); - tokens.should.contain(Tokens.Type("Generic", 2, 36)); - 
tokens.should.contain(Tokens.Puncuation.Accessor(2, 43)); - tokens.should.contain(Tokens.Type("List", 2, 44)); - tokens.should.contain(Tokens.Puncuation.TypeParametersBegin(2, 48)); - tokens.should.contain(Tokens.Type("System", 2, 49)); - tokens.should.contain(Tokens.Puncuation.Accessor(2, 55)); - tokens.should.contain(Tokens.Type("Int32", 2, 56)); - tokens.should.contain(Tokens.Puncuation.TypeParametersEnd(2, 61)); - tokens.should.contain(Tokens.Puncuation.Semicolon(2, 62)); + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Using(2, 1), + Tokens.Identifiers.AliasName("IntList", 2, 7), + Tokens.Operators.Assignment(2, 15), + Tokens.Type("System", 2, 17), + Tokens.Puncuation.Accessor(2, 23), + Tokens.Type("Collections", 2, 24), + Tokens.Puncuation.Accessor(2, 35), + Tokens.Type("Generic", 2, 36), + Tokens.Puncuation.Accessor(2, 43), + Tokens.Type("List", 2, 44), + Tokens.Puncuation.TypeParameters.Begin(2, 48), + Tokens.Type("System", 2, 49), + Tokens.Puncuation.Accessor(2, 55), + Tokens.Type("Int32", 2, 56), + Tokens.Puncuation.TypeParameters.End(2, 61), + Tokens.Puncuation.Semicolon(2, 62)]); }); it("type alias with nested generic types", () => { @@ -93,37 +103,38 @@ using IntList = System.Collections.Generic.List;`; const input = ` using X = System.Collections.Generic.Dictionary>;`; - let tokens: Token[] = TokenizerUtil.tokenize2(input); - - tokens.should.contain(Tokens.Keywords.Using(2, 1)); - tokens.should.contain(Tokens.Identifiers.AliasName("X", 2, 7)); - tokens.should.contain(Tokens.Operators.Assignment(2, 9)); - tokens.should.contain(Tokens.Type("System", 2, 11)); - tokens.should.contain(Tokens.Puncuation.Accessor(2, 17)); - tokens.should.contain(Tokens.Type("Collections", 2, 18)); - tokens.should.contain(Tokens.Puncuation.Accessor(2, 29)); - tokens.should.contain(Tokens.Type("Generic", 2, 30)); - tokens.should.contain(Tokens.Puncuation.Accessor(2, 37)); - tokens.should.contain(Tokens.Type("Dictionary", 2, 38)); 
- tokens.should.contain(Tokens.Puncuation.TypeParametersBegin(2, 48)); - tokens.should.contain(Tokens.Type("System", 2, 49)); - tokens.should.contain(Tokens.Puncuation.Accessor(2, 55)); - tokens.should.contain(Tokens.Type("Int32", 2, 56)); - tokens.should.contain(Tokens.Puncuation.Comma(2, 61)); - tokens.should.contain(Tokens.Type("System", 2, 63)); - tokens.should.contain(Tokens.Puncuation.Accessor(2, 69)); - tokens.should.contain(Tokens.Type("Collections", 2, 70)); - tokens.should.contain(Tokens.Puncuation.Accessor(2, 81)); - tokens.should.contain(Tokens.Type("Generic", 2, 82)); - tokens.should.contain(Tokens.Puncuation.Accessor(2, 89)); - tokens.should.contain(Tokens.Type("List", 2, 90)); - tokens.should.contain(Tokens.Puncuation.TypeParametersBegin(2, 94)); - tokens.should.contain(Tokens.Type("System", 2, 95)); - tokens.should.contain(Tokens.Puncuation.Accessor(2, 101)); - tokens.should.contain(Tokens.Type("String", 2, 102)); - tokens.should.contain(Tokens.Puncuation.TypeParametersEnd(2, 108)); - tokens.should.contain(Tokens.Puncuation.TypeParametersEnd(2, 109)); - tokens.should.contain(Tokens.Puncuation.Semicolon(2, 110)); + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Using(2, 1), + Tokens.Identifiers.AliasName("X", 2, 7), + Tokens.Operators.Assignment(2, 9), + Tokens.Type("System", 2, 11), + Tokens.Puncuation.Accessor(2, 17), + Tokens.Type("Collections", 2, 18), + Tokens.Puncuation.Accessor(2, 29), + Tokens.Type("Generic", 2, 30), + Tokens.Puncuation.Accessor(2, 37), + Tokens.Type("Dictionary", 2, 38), + Tokens.Puncuation.TypeParameters.Begin(2, 48), + Tokens.Type("System", 2, 49), + Tokens.Puncuation.Accessor(2, 55), + Tokens.Type("Int32", 2, 56), + Tokens.Puncuation.Comma(2, 61), + Tokens.Type("System", 2, 63), + Tokens.Puncuation.Accessor(2, 69), + Tokens.Type("Collections", 2, 70), + Tokens.Puncuation.Accessor(2, 81), + Tokens.Type("Generic", 2, 82), + Tokens.Puncuation.Accessor(2, 89), + 
Tokens.Type("List", 2, 90), + Tokens.Puncuation.TypeParameters.Begin(2, 94), + Tokens.Type("System", 2, 95), + Tokens.Puncuation.Accessor(2, 101), + Tokens.Type("String", 2, 102), + Tokens.Puncuation.TypeParameters.End(2, 108), + Tokens.Puncuation.TypeParameters.End(2, 109), + Tokens.Puncuation.Semicolon(2, 110)]); }); it("type alias with nested generic types and comments interspersed", () => { @@ -131,43 +142,44 @@ using X = System.Collections.Generic.Dictionary/**/>/**/;//end`; - let tokens: Token[] = TokenizerUtil.tokenize2(input); - - tokens.should.contain(Tokens.Keywords.Using(2, 1)); - tokens.should.contain(Tokens.Identifiers.AliasName("X", 2, 7)); - tokens.should.contain(Tokens.Operators.Assignment(2, 9)); - tokens.should.contain(Tokens.Comment.MultiLine.Start(2, 10)); - tokens.should.contain(Tokens.Comment.MultiLine.End(2, 12)); - tokens.should.contain(Tokens.Type("Dictionary", 2, 14)); - tokens.should.contain(Tokens.Comment.MultiLine.Start(2, 24)); - tokens.should.contain(Tokens.Comment.MultiLine.End(2, 26)); - tokens.should.contain(Tokens.Puncuation.TypeParametersBegin(2, 28)); - tokens.should.contain(Tokens.Comment.MultiLine.Start(2, 29)); - tokens.should.contain(Tokens.Comment.MultiLine.End(2, 31)); - tokens.should.contain(Tokens.Type("int", 2, 33)); - tokens.should.contain(Tokens.Comment.MultiLine.Start(2, 36)); - tokens.should.contain(Tokens.Comment.MultiLine.End(2, 38)); - tokens.should.contain(Tokens.Puncuation.Comma(2, 40)); - tokens.should.contain(Tokens.Comment.MultiLine.Start(2, 41)); - tokens.should.contain(Tokens.Comment.MultiLine.End(2, 43)); - tokens.should.contain(Tokens.Type("List", 2, 45)); - tokens.should.contain(Tokens.Comment.MultiLine.Start(2, 49)); - tokens.should.contain(Tokens.Comment.MultiLine.End(2, 51)); - tokens.should.contain(Tokens.Puncuation.TypeParametersBegin(2, 53)); - tokens.should.contain(Tokens.Comment.MultiLine.Start(2, 54)); - tokens.should.contain(Tokens.Comment.MultiLine.End(2, 56)); - 
tokens.should.contain(Tokens.Type("string", 2, 58)); - tokens.should.contain(Tokens.Comment.MultiLine.Start(2, 64)); - tokens.should.contain(Tokens.Comment.MultiLine.End(2, 66)); - tokens.should.contain(Tokens.Puncuation.TypeParametersEnd(2, 68)); - tokens.should.contain(Tokens.Comment.MultiLine.Start(2, 69)); - tokens.should.contain(Tokens.Comment.MultiLine.End(2, 71)); - tokens.should.contain(Tokens.Puncuation.TypeParametersEnd(2, 73)); - tokens.should.contain(Tokens.Comment.MultiLine.Start(2, 74)); - tokens.should.contain(Tokens.Comment.MultiLine.End(2, 76)); - tokens.should.contain(Tokens.Puncuation.Semicolon(2, 78)); - tokens.should.contain(Tokens.Comment.SingleLine.Start(2, 79)); - tokens.should.contain(Tokens.Comment.SingleLine.Text("end", 2, 81)); + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Using(2, 1), + Tokens.Identifiers.AliasName("X", 2, 7), + Tokens.Operators.Assignment(2, 9), + Tokens.Comment.MultiLine.Start(2, 10), + Tokens.Comment.MultiLine.End(2, 12), + Tokens.Type("Dictionary", 2, 14), + Tokens.Comment.MultiLine.Start(2, 24), + Tokens.Comment.MultiLine.End(2, 26), + Tokens.Puncuation.TypeParameters.Begin(2, 28), + Tokens.Comment.MultiLine.Start(2, 29), + Tokens.Comment.MultiLine.End(2, 31), + Tokens.Type("int", 2, 33), + Tokens.Comment.MultiLine.Start(2, 36), + Tokens.Comment.MultiLine.End(2, 38), + Tokens.Puncuation.Comma(2, 40), + Tokens.Comment.MultiLine.Start(2, 41), + Tokens.Comment.MultiLine.End(2, 43), + Tokens.Type("List", 2, 45), + Tokens.Comment.MultiLine.Start(2, 49), + Tokens.Comment.MultiLine.End(2, 51), + Tokens.Puncuation.TypeParameters.Begin(2, 53), + Tokens.Comment.MultiLine.Start(2, 54), + Tokens.Comment.MultiLine.End(2, 56), + Tokens.Type("string", 2, 58), + Tokens.Comment.MultiLine.Start(2, 64), + Tokens.Comment.MultiLine.End(2, 66), + Tokens.Puncuation.TypeParameters.End(2, 68), + Tokens.Comment.MultiLine.Start(2, 69), + Tokens.Comment.MultiLine.End(2, 71), + 
Tokens.Puncuation.TypeParameters.End(2, 73), + Tokens.Comment.MultiLine.Start(2, 74), + Tokens.Comment.MultiLine.End(2, 76), + Tokens.Puncuation.Semicolon(2, 78), + Tokens.Comment.SingleLine.Start(2, 79), + Tokens.Comment.SingleLine.Text("end", 2, 81)]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/utils/tokenizer.ts b/test/syntaxes/utils/tokenizer.ts index 920342b0f4..2519709963 100644 --- a/test/syntaxes/utils/tokenizer.ts +++ b/test/syntaxes/utils/tokenizer.ts @@ -1,8 +1,16 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. + *--------------------------------------------------------------------------------------------*/ + import { ITokenizeLineResult, Registry, IGrammar, StackElement } from 'vscode-textmate'; export class Tokenizer { + private _registry: Registry; private _grammar: IGrammar; + private static readonly _excludedTypes: string[] = [ 'source.cs', 'meta.type.parameters.cs' ]; + constructor(grammarFilePath: string) { this._grammar = new Registry().loadGrammarFromPathSync(grammarFilePath); } @@ -11,11 +19,11 @@ export class Tokenizer { let tokens: Token[] = []; // ensure consistent line-endings irrelevant of OS - input = input.replace("\r\n", "\n"); + input = input.replace('\r\n', '\n'); let previousStack: StackElement = null; - const lines: string[] = input.split("\n"); + const lines: string[] = input.split('\n'); for (let lineIndex = 0; lineIndex < lines.length; lineIndex++) { const line = lines[lineIndex]; @@ -26,7 +34,10 @@ export class Tokenizer { for (const token of result.tokens) { const text = line.substring(token.startIndex, token.endIndex); const type: string = token.scopes[token.scopes.length - 1]; - tokens.push(new Token(text, type, lineIndex + 1, token.startIndex + 1)); + + if 
(Tokenizer._excludedTypes.indexOf(type) < 0) { + tokens.push(new Token(text, type, lineIndex + 1, token.startIndex + 1)); + } } } @@ -56,231 +67,233 @@ export namespace Tokens { export namespace Comment { export const LeadingWhitespace = (text: string, line?: number, column?: number) => - createToken(text, "punctuation.whitespace.comment.leading.cs", line, column); + createToken(text, 'punctuation.whitespace.comment.leading.cs', line, column); export namespace MultiLine { export const End = (line?: number, column?: number) => - createToken("*/", "punctuation.definition.comment.cs", line, column); + createToken('*/', 'punctuation.definition.comment.cs', line, column); export const Start = (line?: number, column?: number) => - createToken("/*", "punctuation.definition.comment.cs", line, column); + createToken('/*', 'punctuation.definition.comment.cs', line, column); export const Text = (text: string, line?: number, column?: number) => - createToken(text, "comment.block.cs", line, column); + createToken(text, 'comment.block.cs', line, column); } export namespace SingleLine { export const Start = (line?: number, column?: number) => - createToken("//", "punctuation.definition.comment.cs", line, column); + createToken('//', 'punctuation.definition.comment.cs', line, column); export const Text = (text: string, line?: number, column?: number) => - createToken(text, "comment.line.double-slash.cs", line, column); + createToken(text, 'comment.line.double-slash.cs', line, column); } } export namespace Identifiers { export const AliasName = (text: string, line?: number, column?: number) => - createToken(text, "entity.name.type.alias.cs", line, column); + createToken(text, 'entity.name.type.alias.cs', line, column); export const ClassName = (text: string, line?: number, column?: number) => - createToken(text, "entity.name.type.class.cs", line, column); + createToken(text, 'entity.name.type.class.cs', line, column); export const InterfaceName = (text: string, line?: number, 
column?: number) => - createToken(text, "entity.name.type.interface.cs", line, column); + createToken(text, 'entity.name.type.interface.cs', line, column); export const NamespaceName = (text: string, line?: number, column?: number) => - createToken(text, "entity.name.type.namespace.cs", line, column); + createToken(text, 'entity.name.type.namespace.cs', line, column); export const PropertyName = (text: string, line?: number, column?: number) => - createToken(text, "entity.name.function.cs", line, column); + createToken(text, 'entity.name.function.cs', line, column); } export namespace Keywords { export namespace Modifiers { export const Abstract = (line?: number, column?: number) => - createToken("abstract", "storage.modifier.cs", line, column); + createToken('abstract', 'storage.modifier.cs', line, column); export const Internal = (line?: number, column?: number) => - createToken("internal", "storage.modifier.cs", line, column); + createToken('internal', 'storage.modifier.cs', line, column); export const New = (line?: number, column?: number) => - createToken("new", "storage.modifier.cs", line, column); + createToken('new', 'storage.modifier.cs', line, column); export const Partial = (line?: number, column?: number) => - createToken("partial", "storage.modifier.cs", line, column); + createToken('partial', 'storage.modifier.cs', line, column); export const Private = (line?: number, column?: number) => - createToken("private", "storage.modifier.cs", line, column); + createToken('private', 'storage.modifier.cs', line, column); export const Protected = (line?: number, column?: number) => - createToken("protected", "storage.modifier.cs", line, column); + createToken('protected', 'storage.modifier.cs', line, column); export const Public = (line?: number, column?: number) => - createToken("public", "storage.modifier.cs", line, column); + createToken('public', 'storage.modifier.cs', line, column); export const Sealed = (line?: number, column?: number) => - 
createToken("sealed", "storage.modifier.cs", line, column); + createToken('sealed', 'storage.modifier.cs', line, column); export const Static = (line?: number, column?: number) => - createToken("static", "storage.modifier.cs", line, column); + createToken('static', 'storage.modifier.cs', line, column); } export const Alias = (line?: number, column?: number) => - createToken("alias", "keyword.other.alias.cs", line, column); + createToken('alias', 'keyword.other.alias.cs', line, column); export const AttributeSpecifier = (text: string, line?: number, column?: number) => - createToken(text, "keyword.other.attribute-specifier.cs", line, column); + createToken(text, 'keyword.other.attribute-specifier.cs', line, column); export const Class = (line?: number, column?: number) => - createToken("class", "keyword.other.class.cs", line, column); + createToken('class', 'keyword.other.class.cs', line, column); export const Extern = (line?: number, column?: number) => - createToken("extern", "keyword.other.extern.cs", line, column); + createToken('extern', 'keyword.other.extern.cs', line, column); export const Interface = (line?: number, column?: number) => - createToken("interface", "keyword.other.interface.cs", line, column); + createToken('interface', 'keyword.other.interface.cs', line, column); export const Namespace = (line?: number, column?: number) => - createToken("namespace", "keyword.other.namespace.cs", line, column); + createToken('namespace', 'keyword.other.namespace.cs', line, column); export const New = (line?: number, column?: number) => - createToken("new", "keyword.other.new.cs", line, column); + createToken('new', 'keyword.other.new.cs', line, column); export const Static = (line?: number, column?: number) => - createToken("static", "keyword.other.static.cs", line, column); + createToken('static', 'keyword.other.static.cs', line, column); export const Struct = (line?: number, column?: number) => - createToken("struct", "keyword.other.struct.cs", line, column); 
+ createToken('struct', 'keyword.other.struct.cs', line, column); export const Using = (line?: number, column?: number) => - createToken("using", "keyword.other.using.cs", line, column); + createToken('using', 'keyword.other.using.cs', line, column); export const Where = (line?: number, column?: number) => - createToken("where", "keyword.other.where.cs", line, column); + createToken('where', 'keyword.other.where.cs', line, column); } export namespace Literals { export namespace Boolean { export const False = (line?: number, column?: number) => - createToken("false", "constant.language.boolean.false.cs", line, column); + createToken('false', 'constant.language.boolean.false.cs', line, column); export const True = (line?: number, column?: number) => - createToken("true", "constant.language.boolean.true.cs", line, column); + createToken('true', 'constant.language.boolean.true.cs', line, column); } export const Null = (line?: number, column?: number) => - createToken("null", "constant.language.null.cs", line, column); + createToken('null', 'constant.language.null.cs', line, column); export namespace Numeric { export const Binary = (text: string, line?: number, column?: number) => - createToken(text, "constant.numeric.binary.cs", line, column); + createToken(text, 'constant.numeric.binary.cs', line, column); export const Decimal = (text: string, line?: number, column?: number) => - createToken(text, "constant.numeric.decimal.cs", line, column); + createToken(text, 'constant.numeric.decimal.cs', line, column); export const Hexadecimal = (text: string, line?: number, column?: number) => - createToken(text, "constant.numeric.hex.cs", line, column); + createToken(text, 'constant.numeric.hex.cs', line, column); } export const String = (text: string, line?: number, column?: number) => - createToken(text, "string.quoted.double.cs", line, column); + createToken(text, 'string.quoted.double.cs', line, column); } export namespace Operators { export const Assignment = (line?: 
number, column?: number) => - createToken("=", "keyword.operator.assignment.cs", line, column); + createToken('=', 'keyword.operator.assignment.cs', line, column); } export namespace Puncuation { export const Accessor = (line?: number, column?: number) => - createToken(".", "punctuation.accessor.cs", line, column); + createToken('.', 'punctuation.accessor.cs', line, column); export const Colon = (line?: number, column?: number) => - createToken(":", "punctuation.separator.colon.cs", line, column); + createToken(':', 'punctuation.separator.colon.cs', line, column); export const Comma = (line?: number, column?: number) => - createToken(",", "punctuation.separator.comma.cs", line, column); + createToken(',', 'punctuation.separator.comma.cs', line, column); export namespace CurlyBrace { export const Close = (line?: number, column?: number) => - createToken("}", "punctuation.curlybrace.close.cs", line, column); + createToken('}', 'punctuation.curlybrace.close.cs', line, column); export const Open = (line?: number, column?: number) => - createToken("{", "punctuation.curlybrace.open.cs", line, column); + createToken('{', 'punctuation.curlybrace.open.cs', line, column); } export namespace Parenthesis { export const Close = (line?: number, column?: number) => - createToken(")", "punctuation.parenthesis.close.cs", line, column); + createToken(')', 'punctuation.parenthesis.close.cs', line, column); export const Open = (line?: number, column?: number) => - createToken("(", "punctuation.parenthesis.open.cs", line, column); + createToken('(', 'punctuation.parenthesis.open.cs', line, column); } export const Semicolon = (line?: number, column?: number) => - createToken(";", "punctuation.terminator.statement.cs", line, column); + createToken(';', 'punctuation.terminator.statement.cs', line, column); export namespace SquareBracket { export const Close = (line?: number, column?: number) => - createToken("]", "punctuation.squarebracket.close.cs", line, column); + createToken(']', 
'punctuation.squarebracket.close.cs', line, column); export const Open = (line?: number, column?: number) => - createToken("[", "punctuation.squarebracket.open.cs", line, column); + createToken('[', 'punctuation.squarebracket.open.cs', line, column); } export namespace String { export const Begin = (line?: number, column?: number) => - createToken('"', "punctuation.definition.string.begin.cs", line, column); + createToken('"', 'punctuation.definition.string.begin.cs', line, column); export const End = (line?: number, column?: number) => - createToken('"', "punctuation.definition.string.end.cs", line, column); + createToken('"', 'punctuation.definition.string.end.cs', line, column); } - export const TypeParametersBegin = (line?: number, column?: number) => - createToken("<", "punctuation.definition.typeparameters.begin.cs", line, column); + export namespace TypeParameters { + export const Begin = (line?: number, column?: number) => + createToken('<', 'punctuation.definition.typeparameters.begin.cs', line, column); - export const TypeParametersEnd = (line?: number, column?: number) => - createToken(">", "punctuation.definition.typeparameters.end.cs", line, column); + export const End = (line?: number, column?: number) => + createToken('>', 'punctuation.definition.typeparameters.end.cs', line, column); + } } export namespace Variables { export const Alias = (text: string, line?: number, column?: number) => - createToken(text, "variable.other.alias.cs", line, column); + createToken(text, 'variable.other.alias.cs', line, column); } export const StorageModifierKeyword = (text: string, line?: number, column?: number) => - createToken(text, "storage.modifier.cs", line, column); + createToken(text, 'storage.modifier.cs', line, column); export const Type = (text: string, line?: number, column?: number) => - createToken(text, "storage.type.cs", line, column); + createToken(text, 'storage.type.cs', line, column); export const Keyword = (text: string, line?: number, column?: 
number) => - createToken(text, "keyword.other.cs", line, column); + createToken(text, 'keyword.other.cs', line, column); export const FieldIdentifier = (text: string, line?: number, column?: number) => - createToken(text, "entity.name.variable.cs", line, column); + createToken(text, 'entity.name.variable.cs', line, column); export const StringDoubleQuoted = (text: string, line?: number, column?: number) => - createToken(text, "string.quoted.double.cs", line, column); + createToken(text, 'string.quoted.double.cs', line, column); export const StringDoubleQuotedVerbatim = (text: string, line?: number, column?: number) => - createToken(text, "string.quoted.double.literal.cs", line, column); + createToken(text, 'string.quoted.double.literal.cs', line, column); export const EventIdentifier = (text: string, line?: number, column?: number) => - createToken(text, "entity.name.variable.cs", line, column); + createToken(text, 'entity.name.variable.cs', line, column); export const LanguageConstant = (text: string, line?: number, column?: number) => - createToken(text, "constant.language.cs", line, column); + createToken(text, 'constant.language.cs', line, column); export const PropertyIdentifier = (text: string, line?: number, column?: number) => - createToken(text, "entity.name.function.cs", line, column); + createToken(text, 'entity.name.function.cs', line, column); export const StringInterpolatedExpression = (text: string, line?: number, column?: number) => - createToken(text, "meta.interpolated.expression.cs", line, column); + createToken(text, 'meta.interpolated.expression.cs', line, column); export const StringStart = (text: string, line?: number, column?: number) => - createToken(text, "punctuation.definition.string.begin.cs", line, column); + createToken(text, 'punctuation.definition.string.begin.cs', line, column); export const StringEnd = (text: string, line?: number, column?: number) => - createToken(text, "punctuation.definition.string.end.cs", line, column); + 
createToken(text, 'punctuation.definition.string.end.cs', line, column); } diff --git a/test/syntaxes/utils/tokenizerUtil.ts b/test/syntaxes/utils/tokenizerUtil.ts index a3ab47f7ec..36352e0e78 100644 --- a/test/syntaxes/utils/tokenizerUtil.ts +++ b/test/syntaxes/utils/tokenizerUtil.ts @@ -1,3 +1,8 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. + *--------------------------------------------------------------------------------------------*/ + import { Tokenizer, Token } from './tokenizer'; export class TokenizerUtil From 6a23de38b092ede2121caa6fe43300ced8b596d0 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Mon, 26 Dec 2016 18:55:15 -0800 Subject: [PATCH 017/192] Split type-declaration into class-declaration and interface-declaration --- syntaxes/csharp2.json | 64 ++++++++++++++++++++++++++++++++++--------- 1 file changed, 51 insertions(+), 13 deletions(-) diff --git a/syntaxes/csharp2.json b/syntaxes/csharp2.json index 9b0e03781a..4c0a204988 100644 --- a/syntaxes/csharp2.json +++ b/syntaxes/csharp2.json @@ -201,7 +201,10 @@ "include": "#namespace-declaration" }, { - "include": "#type-declaration" + "include": "#class-declaration" + }, + { + "include": "#interface-declaration" } ] }, @@ -251,8 +254,8 @@ } ] }, - "type-declaration": { - "begin": "(?=(?:((new|public|protected|internal|private|abstract|sealed|static|partial)\\s+)*)(?:class|struct|interface|enum)\\s+)", + "class-declaration": { + "begin": "(?=(?:((new|public|protected|internal|private|abstract|sealed|static|partial)\\s+)*)(?:class)\\s+)", "end": "(?<=\\})", "patterns": [ { @@ -271,15 +274,6 @@ } ] }, - { - "begin": "(?=interface)", - "end": "(?=\\{)", - "patterns": [ - { - "include": "#interface-header" - } - ] - }, { "begin": "\\{", "beginCaptures": { @@ -295,7 +289,10 @@ }, "patterns": [ 
{ - "include": "#type-declaration" + "include": "#class-declaration" + }, + { + "include": "#interface-declaration" }, { "include": "#punctuation-semicolon" @@ -340,6 +337,47 @@ } ] }, + "interface-declaration": { + "begin": "(?=(?:((new|public|protected|internal|private|partial)\\s+)*)(?:interface)\\s+)", + "end": "(?<=\\})", + "patterns": [ + { + "include": "#comment" + }, + { + "name": "storage.modifier.cs", + "match": "\\b(new|public|protected|internal|private|partial)\\b" + }, + { + "begin": "(?=interface)", + "end": "(?=\\{)", + "patterns": [ + { + "include": "#interface-header" + } + ] + }, + { + "begin": "\\{", + "beginCaptures": { + "0": { + "name": "punctuation.curlybrace.open.cs" + } + }, + "end": "\\}", + "endCaptures": { + "0": { + "name": "punctuation.curlybrace.close.cs" + } + }, + "patterns": [ + { + "include": "#punctuation-semicolon" + } + ] + } + ] + }, "interface-header": { "patterns": [ { From 973adf288f6ea236cd2506d7d93dd73c6f07ea88 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Mon, 26 Dec 2016 19:03:08 -0800 Subject: [PATCH 018/192] Add struct declarations --- syntaxes/csharp2.json | 89 ++++++++++++++++++++++++++++ test/syntaxes/structs.test.syntax.ts | 87 +++++++++++++++++++++++++++ test/syntaxes/utils/tokenizer.ts | 3 + 3 files changed, 179 insertions(+) create mode 100644 test/syntaxes/structs.test.syntax.ts diff --git a/syntaxes/csharp2.json b/syntaxes/csharp2.json index 4c0a204988..95eed09078 100644 --- a/syntaxes/csharp2.json +++ b/syntaxes/csharp2.json @@ -205,6 +205,9 @@ }, { "include": "#interface-declaration" + }, + { + "include": "#struct-declaration" } ] }, @@ -414,6 +417,92 @@ } ] }, + "struct-declaration": { + "begin": "(?=(?:((new|public|protected|internal|private|partial)\\s+)*)(?:struct)\\s+)", + "end": "(?<=\\})", + "patterns": [ + { + "include": "#comment" + }, + { + "name": "storage.modifier.cs", + "match": "\\b(new|public|protected|internal|private|partial)\\b" + }, + { + "begin": "(?=struct)", + "end": "(?=\\{)", 
+ "patterns": [ + { + "include": "#struct-header" + } + ] + }, + { + "begin": "\\{", + "beginCaptures": { + "0": { + "name": "punctuation.curlybrace.open.cs" + } + }, + "end": "\\}", + "endCaptures": { + "0": { + "name": "punctuation.curlybrace.close.cs" + } + }, + "patterns": [ + { + "include": "#class-declaration" + }, + { + "include": "#interface-declaration" + }, + { + "include": "#struct-declaration" + }, + { + "include": "#punctuation-semicolon" + } + ] + } + ] + }, + "struct-header": { + "patterns": [ + { + "comment": "C# grammar: struct identifier type-parameter-list[opt]", + "match": "(struct)\\s+([_$[:alpha:]][_$[:alnum:]]*(\\s*<\\s*(?:[_$[:alpha:]][_$[:alnum:]]*\\s*,\\s*)*(?:[_$[:alpha:]][_$[:alnum:]]*)\\s*>)?)", + "captures": { + "1": { + "name": "keyword.other.struct.cs" + }, + "2": { + "name": "entity.name.type.struct.cs" + } + } + }, + { + "include": "#generic-constraints" + }, + { + "begin": ":", + "beginCaptures": { + "0": { + "name": "punctuation.separator.colon.cs" + } + }, + "end": "(?=\\{|where)", + "patterns": [ + { + "include": "#type" + }, + { + "include": "#punctuation-comma" + } + ] + } + ] + }, "generic-constraints": { "begin": "(where)\\s+(\\w+)\\s*(:)", "beginCaptures": { diff --git a/test/syntaxes/structs.test.syntax.ts b/test/syntaxes/structs.test.syntax.ts new file mode 100644 index 0000000000..ea17953353 --- /dev/null +++ b/test/syntaxes/structs.test.syntax.ts @@ -0,0 +1,87 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. 
+ *--------------------------------------------------------------------------------------------*/ + +import { should } from 'chai'; +import { Tokens } from './utils/tokenizer'; +import { TokenizerUtil } from './utils/tokenizerUtil'; + +describe("Grammar", () => { + before(() => should()); + + describe("Structs", () => { + it("simple simple", () => { + + const input = ` +struct S { } +`; + + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Struct(2, 1), + Tokens.Identifiers.StructName("S", 2, 8), + Tokens.Puncuation.CurlyBrace.Open(2, 10), + Tokens.Puncuation.CurlyBrace.Close(2, 12)]); + }); + + it("struct interface implementation", () => { + + const input = ` +interface IFoo { } +struct S : IFoo { } +`; + + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Interface(2, 1), + Tokens.Identifiers.InterfaceName("IFoo", 2, 11), + Tokens.Puncuation.CurlyBrace.Open(2, 16), + Tokens.Puncuation.CurlyBrace.Close(2, 18), + Tokens.Keywords.Struct(3, 1), + Tokens.Identifiers.StructName("S", 3, 8), + Tokens.Puncuation.Colon(3, 10), + Tokens.Type("IFoo", 3, 12), + Tokens.Puncuation.CurlyBrace.Open(3, 17), + Tokens.Puncuation.CurlyBrace.Close(3, 19)]); + }); + + it("generic struct", () => { + + const input = ` +struct S { } +`; + + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Struct(2, 1), + Tokens.Identifiers.StructName("S", 2, 8), + Tokens.Puncuation.CurlyBrace.Open(2, 18), + Tokens.Puncuation.CurlyBrace.Close(2, 20)]); + }); + + it("generic struct with constraints", () => { + + const input = ` +struct S where T1 : T2 { } +`; + + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Struct(2, 1), + Tokens.Identifiers.StructName("S", 2, 8), + Tokens.Keywords.Where(2, 18), + Tokens.Type("T1", 2, 24), + Tokens.Puncuation.Colon(2, 27), + Tokens.Type("T2", 2, 29), + Tokens.Puncuation.CurlyBrace.Open(2, 32), + 
Tokens.Puncuation.CurlyBrace.Close(2, 34)]); + }); + }); +}); + + diff --git a/test/syntaxes/utils/tokenizer.ts b/test/syntaxes/utils/tokenizer.ts index 2519709963..dbff90fc08 100644 --- a/test/syntaxes/utils/tokenizer.ts +++ b/test/syntaxes/utils/tokenizer.ts @@ -104,6 +104,9 @@ export namespace Tokens { export const PropertyName = (text: string, line?: number, column?: number) => createToken(text, 'entity.name.function.cs', line, column); + + export const StructName = (text: string, line?: number, column?: number) => + createToken(text, 'entity.name.type.struct.cs', line, column); } export namespace Keywords { From cbc65f98f3e45ab46fda49dc8087c3fce17d490d Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Mon, 26 Dec 2016 19:45:23 -0800 Subject: [PATCH 019/192] Add enum declarations --- syntaxes/csharp2.json | 301 +++++++++++++++++---------- test/syntaxes/enums.test.syntax.ts | 111 ++++++++++ test/syntaxes/structs.test.syntax.ts | 2 +- test/syntaxes/utils/tokenizer.ts | 9 + 4 files changed, 312 insertions(+), 111 deletions(-) create mode 100644 test/syntaxes/enums.test.syntax.ts diff --git a/syntaxes/csharp2.json b/syntaxes/csharp2.json index 95eed09078..ac39ad2c19 100644 --- a/syntaxes/csharp2.json +++ b/syntaxes/csharp2.json @@ -201,13 +201,7 @@ "include": "#namespace-declaration" }, { - "include": "#class-declaration" - }, - { - "include": "#interface-declaration" - }, - { - "include": "#struct-declaration" + "include": "#type-declaration" } ] }, @@ -257,6 +251,22 @@ } ] }, + "type-declaration": { + "patterns": [ + { + "include": "#class-declaration" + }, + { + "include": "#enum-declaration" + }, + { + "include": "#interface-declaration" + }, + { + "include": "#struct-declaration" + } + ] + }, "class-declaration": { "begin": "(?=(?:((new|public|protected|internal|private|abstract|sealed|static|partial)\\s+)*)(?:class)\\s+)", "end": "(?<=\\})", @@ -273,7 +283,36 @@ "end": "(?=\\{)", "patterns": [ { - "include": "#class-header" + "comment": "C# grammar: class 
identifier type-parameter-list[opt]", + "match": "(class)\\s+([_$[:alpha:]][_$[:alnum:]]*(\\s*<\\s*(?:[_$[:alpha:]][_$[:alnum:]]*\\s*,\\s*)*(?:[_$[:alpha:]][_$[:alnum:]]*)\\s*>)?)", + "captures": { + "1": { + "name": "keyword.other.class.cs" + }, + "2": { + "name": "entity.name.type.class.cs" + } + } + }, + { + "include": "#generic-constraints" + }, + { + "begin": ":", + "beginCaptures": { + "0": { + "name": "punctuation.separator.colon.cs" + } + }, + "end": "(?=\\{|where)", + "patterns": [ + { + "include": "#type" + }, + { + "include": "#punctuation-comma" + } + ] } ] }, @@ -292,10 +331,7 @@ }, "patterns": [ { - "include": "#class-declaration" - }, - { - "include": "#interface-declaration" + "include": "#type-declaration" }, { "include": "#punctuation-semicolon" @@ -304,37 +340,88 @@ } ] }, - "class-header": { + "enum-declaration": { + "begin": "(?=(?:((new|public|protected|internal|private)\\s+)*)(?:enum)\\s+)", + "end": "(?<=\\})", "patterns": [ { - "comment": "C# grammar: class identifier type-parameter-list[opt]", - "match": "(class)\\s+([_$[:alpha:]][_$[:alnum:]]*(\\s*<\\s*(?:[_$[:alpha:]][_$[:alnum:]]*\\s*,\\s*)*(?:[_$[:alpha:]][_$[:alnum:]]*)\\s*>)?)", - "captures": { - "1": { - "name": "keyword.other.class.cs" - }, - "2": { - "name": "entity.name.type.class.cs" - } - } + "include": "#comment" }, { - "include": "#generic-constraints" + "name": "storage.modifier.cs", + "match": "\\b(new|public|protected|internal|private)\\b" }, { - "begin": ":", + "begin": "(?=enum)", + "end": "(?=\\{)", + "patterns": [ + { + "comment": "C# grammar: enum identifier", + "match": "(enum)\\s+([_$[:alpha:]][_$[:alnum:]]*)", + "captures": { + "1": { + "name": "keyword.other.enum.cs" + }, + "2": { + "name": "entity.name.type.enum.cs" + } + } + }, + { + "begin": ":", + "beginCaptures": { + "0": { + "name": "punctuation.separator.colon.cs" + } + }, + "end": "(?=\\{)", + "patterns": [ + { + "include": "#type" + } + ] + } + ] + }, + { + "begin": "\\{", "beginCaptures": { "0": { - 
"name": "punctuation.separator.colon.cs" + "name": "punctuation.curlybrace.open.cs" + } + }, + "end": "\\}", + "endCaptures": { + "0": { + "name": "punctuation.curlybrace.close.cs" } }, - "end": "(?=\\{|where)", "patterns": [ { - "include": "#type" + "include": "#comment" + }, + { + "include": "#attribute-section" }, { "include": "#punctuation-comma" + }, + { + "begin": "[_$[:alpha:]][_$[:alnum:]]*", + "beginCaptures": { + "0": { + "name": "variable.other.enummember.cs" + } + }, + "end": "(?=(,|\\}))", + "patterns": [ + { + "include": "#comment" + }, + { + "include": "#variable-initializer" + } + ] } ] } @@ -356,7 +443,36 @@ "end": "(?=\\{)", "patterns": [ { - "include": "#interface-header" + "comment": "C# grammar: interface identifier variant-type-parameter-list[opt]", + "match": "(interface)\\s+([_$[:alpha:]][_$[:alnum:]]*(\\s*<\\s*(?:((in|out)\\s+)?[_$[:alpha:]][_$[:alnum:]]*\\s*,\\s*)*(?:((in|out)\\s+)?[_$[:alpha:]][_$[:alnum:]]*)\\s*>)?)", + "captures": { + "1": { + "name": "keyword.other.interface.cs" + }, + "2": { + "name": "entity.name.type.interface.cs" + } + } + }, + { + "include": "#generic-constraints" + }, + { + "begin": ":", + "beginCaptures": { + "0": { + "name": "punctuation.separator.colon.cs" + } + }, + "end": "(?=\\{|where)", + "patterns": [ + { + "include": "#type" + }, + { + "include": "#punctuation-comma" + } + ] } ] }, @@ -381,42 +497,6 @@ } ] }, - "interface-header": { - "patterns": [ - { - "comment": "C# grammar: interface identifier variant-type-parameter-list[opt]", - "match": "(interface)\\s+([_$[:alpha:]][_$[:alnum:]]*(\\s*<\\s*(?:((in|out)\\s+)?[_$[:alpha:]][_$[:alnum:]]*\\s*,\\s*)*(?:((in|out)\\s+)?[_$[:alpha:]][_$[:alnum:]]*)\\s*>)?)", - "captures": { - "1": { - "name": "keyword.other.interface.cs" - }, - "2": { - "name": "entity.name.type.interface.cs" - } - } - }, - { - "include": "#generic-constraints" - }, - { - "begin": ":", - "beginCaptures": { - "0": { - "name": "punctuation.separator.colon.cs" - } - }, - "end": 
"(?=\\{|where)", - "patterns": [ - { - "include": "#type" - }, - { - "include": "#punctuation-comma" - } - ] - } - ] - }, "struct-declaration": { "begin": "(?=(?:((new|public|protected|internal|private|partial)\\s+)*)(?:struct)\\s+)", "end": "(?<=\\})", @@ -433,7 +513,36 @@ "end": "(?=\\{)", "patterns": [ { - "include": "#struct-header" + "comment": "C# grammar: struct identifier type-parameter-list[opt]", + "match": "(struct)\\s+([_$[:alpha:]][_$[:alnum:]]*(\\s*<\\s*(?:[_$[:alpha:]][_$[:alnum:]]*\\s*,\\s*)*(?:[_$[:alpha:]][_$[:alnum:]]*)\\s*>)?)", + "captures": { + "1": { + "name": "keyword.other.struct.cs" + }, + "2": { + "name": "entity.name.type.struct.cs" + } + } + }, + { + "include": "#generic-constraints" + }, + { + "begin": ":", + "beginCaptures": { + "0": { + "name": "punctuation.separator.colon.cs" + } + }, + "end": "(?=\\{|where)", + "patterns": [ + { + "include": "#type" + }, + { + "include": "#punctuation-comma" + } + ] } ] }, @@ -452,13 +561,7 @@ }, "patterns": [ { - "include": "#class-declaration" - }, - { - "include": "#interface-declaration" - }, - { - "include": "#struct-declaration" + "include": "#type-declaration" }, { "include": "#punctuation-semicolon" @@ -467,42 +570,6 @@ } ] }, - "struct-header": { - "patterns": [ - { - "comment": "C# grammar: struct identifier type-parameter-list[opt]", - "match": "(struct)\\s+([_$[:alpha:]][_$[:alnum:]]*(\\s*<\\s*(?:[_$[:alpha:]][_$[:alnum:]]*\\s*,\\s*)*(?:[_$[:alpha:]][_$[:alnum:]]*)\\s*>)?)", - "captures": { - "1": { - "name": "keyword.other.struct.cs" - }, - "2": { - "name": "entity.name.type.struct.cs" - } - } - }, - { - "include": "#generic-constraints" - }, - { - "begin": ":", - "beginCaptures": { - "0": { - "name": "punctuation.separator.colon.cs" - } - }, - "end": "(?=\\{|where)", - "patterns": [ - { - "include": "#type" - }, - { - "include": "#punctuation-comma" - } - ] - } - ] - }, "generic-constraints": { "begin": "(where)\\s+(\\w+)\\s*(:)", "beginCaptures": { @@ -551,6 +618,20 @@ } ] }, + 
"variable-initializer": { + "begin": "(? { + before(() => should()); + + describe("Enums", () => { + it("simple enum", () => { + + const input = ` +enum E { } +`; + + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Enum(2, 1), + Tokens.Identifiers.EnumName("E", 2, 6), + Tokens.Puncuation.CurlyBrace.Open(2, 8), + Tokens.Puncuation.CurlyBrace.Close(2, 10)]); + }); + + it("enum with base type", () => { + + const input = ` +enum E : byte { } +`; + + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Enum(2, 1), + Tokens.Identifiers.EnumName("E", 2, 6), + Tokens.Puncuation.Colon(2, 8), + Tokens.Type("byte", 2, 10), + Tokens.Puncuation.CurlyBrace.Open(2, 15), + Tokens.Puncuation.CurlyBrace.Close(2, 17)]); + }); + + it("enum with single member", () => { + + const input = ` +enum E { M1 } +`; + + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Enum(2, 1), + Tokens.Identifiers.EnumName("E", 2, 6), + Tokens.Puncuation.CurlyBrace.Open(2, 8), + Tokens.Variables.EnumMember("M1", 2, 10), + Tokens.Puncuation.CurlyBrace.Close(2, 13)]); + }); + + it("enum with multiple members", () => { + + const input = ` +enum Color { Red, Green, Blue } +`; + + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Enum(2, 1), + Tokens.Identifiers.EnumName("Color", 2, 6), + Tokens.Puncuation.CurlyBrace.Open(2, 12), + Tokens.Variables.EnumMember("Red", 2, 14), + Tokens.Puncuation.Comma(2, 17), + Tokens.Variables.EnumMember("Green", 2, 19), + Tokens.Puncuation.Comma(2, 24), + Tokens.Variables.EnumMember("Blue", 2, 26), + Tokens.Puncuation.CurlyBrace.Close(2, 31)]); + }); + + it("enum with initialized member", () => { + + const input = ` +enum E +{ + Value1 = 1, + Value2, + Value3 +} +`; + + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Enum(2, 1), + 
Tokens.Identifiers.EnumName("E", 2, 6), + Tokens.Puncuation.CurlyBrace.Open(3, 1), + Tokens.Variables.EnumMember("Value1", 4, 5), + Tokens.Operators.Assignment(4, 12), + Tokens.Literals.Numeric.Decimal("1", 4, 14), + Tokens.Puncuation.Comma(4, 15), + Tokens.Variables.EnumMember("Value2", 5, 5), + Tokens.Puncuation.Comma(5, 11), + Tokens.Variables.EnumMember("Value3", 6, 5), + Tokens.Puncuation.CurlyBrace.Close(7, 1)]); + }); + }); +}); + + diff --git a/test/syntaxes/structs.test.syntax.ts b/test/syntaxes/structs.test.syntax.ts index ea17953353..c385bbfa2f 100644 --- a/test/syntaxes/structs.test.syntax.ts +++ b/test/syntaxes/structs.test.syntax.ts @@ -11,7 +11,7 @@ describe("Grammar", () => { before(() => should()); describe("Structs", () => { - it("simple simple", () => { + it("simple struct", () => { const input = ` struct S { } diff --git a/test/syntaxes/utils/tokenizer.ts b/test/syntaxes/utils/tokenizer.ts index dbff90fc08..85df75b842 100644 --- a/test/syntaxes/utils/tokenizer.ts +++ b/test/syntaxes/utils/tokenizer.ts @@ -96,6 +96,9 @@ export namespace Tokens { export const ClassName = (text: string, line?: number, column?: number) => createToken(text, 'entity.name.type.class.cs', line, column); + export const EnumName = (text: string, line?: number, column?: number) => + createToken(text, 'entity.name.type.enum.cs', line, column); + export const InterfaceName = (text: string, line?: number, column?: number) => createToken(text, 'entity.name.type.interface.cs', line, column); @@ -148,6 +151,9 @@ export namespace Tokens { export const Class = (line?: number, column?: number) => createToken('class', 'keyword.other.class.cs', line, column); + export const Enum = (line?: number, column?: number) => + createToken('enum', 'keyword.other.enum.cs', line, column); + export const Extern = (line?: number, column?: number) => createToken('extern', 'keyword.other.extern.cs', line, column); @@ -262,6 +268,9 @@ export namespace Tokens { export namespace Variables { export 
const Alias = (text: string, line?: number, column?: number) => createToken(text, 'variable.other.alias.cs', line, column); + + export const EnumMember = (text: string, line?: number, column?: number) => + createToken(text, 'variable.other.enummember.cs', line, column); } export const StorageModifierKeyword = (text: string, line?: number, column?: number) => From 2af4bf23cd77643dc5caed6a64979c640e1a5b1a Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Mon, 26 Dec 2016 20:51:03 -0800 Subject: [PATCH 020/192] Add delegate declarations --- syntaxes/csharp2.json | 94 +++++++++++++++++++++- test/syntaxes/delegates.test.syntax.ts | 104 +++++++++++++++++++++++++ test/syntaxes/utils/tokenizer.ts | 20 ++++- 3 files changed, 216 insertions(+), 2 deletions(-) create mode 100644 test/syntaxes/delegates.test.syntax.ts diff --git a/syntaxes/csharp2.json b/syntaxes/csharp2.json index ac39ad2c19..b2a2945068 100644 --- a/syntaxes/csharp2.json +++ b/syntaxes/csharp2.json @@ -202,6 +202,9 @@ }, { "include": "#type-declaration" + }, + { + "include": "#punctuation-semicolon" } ] }, @@ -256,6 +259,9 @@ { "include": "#class-declaration" }, + { + "include": "#delegate-declaration" + }, { "include": "#enum-declaration" }, @@ -340,6 +346,48 @@ } ] }, + "delegate-declaration": { + "begin": "(?=(?:((new|public|protected|internal|private)\\s+)*)(?:delegate)\\s+)", + "end": "(?=;)", + "patterns": [ + { + "include": "#comment" + }, + { + "name": "storage.modifier.cs", + "match": "\\b(new|public|protected|internal|private)\\b" + }, + { + "begin": "\\b(delegate)\\b\\s+", + "beginCaptures": { + "1": { + "name": "keyword.other.delegate.cs" + } + }, + "end": "(?=\\()", + "patterns": [ + { + "comment": "C# grammar: identifier variant-type-parameter-list[opt] (", + "match": "\\s+([_$[:alpha:]][_$[:alnum:]]*(\\s*<\\s*(?:((in|out)\\s+)?[_$[:alpha:]][_$[:alnum:]]*\\s*,\\s*)*(?:((in|out)\\s+)?[_$[:alpha:]][_$[:alnum:]]*)\\s*>)?)\\s*(?=\\()", + "captures": { + "1": { + "name": 
"entity.name.type.delegate.cs" + } + } + }, + { + "include": "#type" + } + ] + }, + { + "include": "#parenthesized-parameter-list" + }, + { + "include": "#generic-constraints" + } + ] + }, "enum-declaration": { "begin": "(?=(?:((new|public|protected|internal|private)\\s+)*)(?:enum)\\s+)", "end": "(?<=\\})", @@ -583,7 +631,7 @@ "name": "punctuation.separator.colon.cs" } }, - "end": "(?=\\{|where)", + "end": "(?=\\{|where|;)", "patterns": [ { "name": "keyword.other.class.cs", @@ -704,6 +752,50 @@ "match": "\\b[0-9_]+(F|f|D|d|M|m)" } ] + }, + "parenthesized-parameter-list": { + "begin": "(?=(\\())", + "beginCaptures": { + "1": { + "name": "punctuation.parenthesis.open.cs" + } + }, + "end": "(?=(\\)))", + "endCaptures": { + "1": { + "name": "punctuation.parenthesis.close.cs" + } + }, + "patterns": [ + { + "include": "#comment" + }, + { + "include": "#attribute-section" + }, + { + "match": "\\b(ref|params|out)\\b", + "name": "storage.modifier.cs" + }, + { + "comment": "parameter name", + "match": "\\s+([_$[:alpha:]][_$[:alnum:]]*)\\s*(?=[,)])", + "captures": { + "1": { + "name": "variable.parameter.cs" + } + } + }, + { + "include": "#variable-initializer" + }, + { + "include": "#type" + }, + { + "include": "#punctuation-comma" + } + ] }, "string-literal": { "name": "string.quoted.double.cs", diff --git a/test/syntaxes/delegates.test.syntax.ts b/test/syntaxes/delegates.test.syntax.ts new file mode 100644 index 0000000000..df13ef79da --- /dev/null +++ b/test/syntaxes/delegates.test.syntax.ts @@ -0,0 +1,104 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. 
+ *--------------------------------------------------------------------------------------------*/ + +import { should } from 'chai'; +import { Tokens } from './utils/tokenizer'; +import { TokenizerUtil } from './utils/tokenizerUtil'; + +describe("Grammar", () => { + before(() => should()); + + describe("Delegates", () => { + it("void delegate with no parameters", () => { + + const input = ` +delegate void D(); +`; + + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Delegate(2, 1), + Tokens.Type("void", 2, 10), + Tokens.Identifiers.DelegateName("D", 2, 15), + Tokens.Puncuation.Parenthesis.Open(2, 16), + Tokens.Puncuation.Parenthesis.Close(2, 17), + Tokens.Puncuation.Semicolon(2, 18)]); + }); + + it("generic delegate with variance", () => { + + const input = ` +delegate TResult D(T arg1); +`; + + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Delegate(2, 1), + Tokens.Type("TResult", 2, 10), + Tokens.Identifiers.DelegateName("D", 2, 18), + Tokens.Puncuation.Parenthesis.Open(2, 38), + Tokens.Type("T", 2, 39), + Tokens.Variables.Parameter("arg1", 2, 41), + Tokens.Puncuation.Parenthesis.Close(2, 45), + Tokens.Puncuation.Semicolon(2, 46)]); + }); + + it("generic delegate with constraints", () => { + + const input = ` +delegate void D() + where T1 : T2; +`; + + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Delegate(2, 1), + Tokens.Type("void", 2, 10), + Tokens.Identifiers.DelegateName("D", 2, 15), + Tokens.Puncuation.Parenthesis.Open(2, 24), + Tokens.Puncuation.Parenthesis.Close(2, 25), + Tokens.Keywords.Where(3, 5), + Tokens.Type("T1", 3, 11), + Tokens.Puncuation.Colon(3, 14), + Tokens.Type("T2", 3, 16), + Tokens.Puncuation.Semicolon(3, 18)]); + }); + + it("delegate with multiple parameters", () => { + + const input = ` +delegate int D(ref string x, out int y, params object[] z); +`; + + let tokens = TokenizerUtil.tokenize2(input); + 
+ tokens.should.deep.equal([ + Tokens.Keywords.Delegate(2, 1), + Tokens.Type("int", 2, 10), + Tokens.Identifiers.DelegateName("D", 2, 14), + Tokens.Puncuation.Parenthesis.Open(2, 15), + Tokens.Keywords.Modifiers.Ref(2, 16), + Tokens.Type("string", 2, 20), + Tokens.Variables.Parameter("x", 2, 27), + Tokens.Puncuation.Comma(2, 28), + Tokens.Keywords.Modifiers.Out(2, 30), + Tokens.Type("int", 2, 34), + Tokens.Variables.Parameter("y", 2, 38), + Tokens.Puncuation.Comma(2, 39), + Tokens.Keywords.Modifiers.Params(2, 41), + Tokens.Type("object", 2, 48), + Tokens.Puncuation.SquareBracket.Open(2, 54), + Tokens.Puncuation.SquareBracket.Close(2, 55), + Tokens.Variables.Parameter("z", 2, 57), + Tokens.Puncuation.Parenthesis.Close(2, 58), + Tokens.Puncuation.Semicolon(2, 59)]); + }); + }); +}); + + diff --git a/test/syntaxes/utils/tokenizer.ts b/test/syntaxes/utils/tokenizer.ts index 85df75b842..ec8ff9ee63 100644 --- a/test/syntaxes/utils/tokenizer.ts +++ b/test/syntaxes/utils/tokenizer.ts @@ -96,6 +96,9 @@ export namespace Tokens { export const ClassName = (text: string, line?: number, column?: number) => createToken(text, 'entity.name.type.class.cs', line, column); + export const DelegateName = (text: string, line?: number, column?: number) => + createToken(text, 'entity.name.type.delegate.cs', line, column); + export const EnumName = (text: string, line?: number, column?: number) => createToken(text, 'entity.name.type.enum.cs', line, column); @@ -123,6 +126,12 @@ export namespace Tokens { export const New = (line?: number, column?: number) => createToken('new', 'storage.modifier.cs', line, column); + export const Out = (line?: number, column?: number) => + createToken('out', 'storage.modifier.cs', line, column); + + export const Params = (line?: number, column?: number) => + createToken('params', 'storage.modifier.cs', line, column); + export const Partial = (line?: number, column?: number) => createToken('partial', 'storage.modifier.cs', line, column); @@ -135,6 +144,9 @@ 
export namespace Tokens { export const Public = (line?: number, column?: number) => createToken('public', 'storage.modifier.cs', line, column); + export const Ref = (line?: number, column?: number) => + createToken('ref', 'storage.modifier.cs', line, column); + export const Sealed = (line?: number, column?: number) => createToken('sealed', 'storage.modifier.cs', line, column); @@ -151,6 +163,9 @@ export namespace Tokens { export const Class = (line?: number, column?: number) => createToken('class', 'keyword.other.class.cs', line, column); + export const Delegate = (line?: number, column?: number) => + createToken('delegate', 'keyword.other.delegate.cs', line, column); + export const Enum = (line?: number, column?: number) => createToken('enum', 'keyword.other.enum.cs', line, column); @@ -268,9 +283,12 @@ export namespace Tokens { export namespace Variables { export const Alias = (text: string, line?: number, column?: number) => createToken(text, 'variable.other.alias.cs', line, column); - + export const EnumMember = (text: string, line?: number, column?: number) => createToken(text, 'variable.other.enummember.cs', line, column); + + export const Parameter = (text: string, line?: number, column?: number) => + createToken(text, 'variable.parameter.cs', line, column); } export const StorageModifierKeyword = (text: string, line?: number, column?: number) => From d0ecb1aea0aad3f3ac02d47f4dd43728d82434fa Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Mon, 26 Dec 2016 22:48:05 -0800 Subject: [PATCH 021/192] Add field declarations --- syntaxes/csharp2.json | 64 +++++++- test/syntaxes/field.test.syntax.ts | 141 ---------------- test/syntaxes/fields.test.syntax.ts | 231 +++++++++++++++++++++++++++ test/syntaxes/utils/tokenizer.ts | 24 +-- test/syntaxes/utils/tokenizerUtil.ts | 8 +- 5 files changed, 313 insertions(+), 155 deletions(-) delete mode 100644 test/syntaxes/field.test.syntax.ts create mode 100644 test/syntaxes/fields.test.syntax.ts diff --git 
a/syntaxes/csharp2.json b/syntaxes/csharp2.json index b2a2945068..751b3f3daf 100644 --- a/syntaxes/csharp2.json +++ b/syntaxes/csharp2.json @@ -339,6 +339,9 @@ { "include": "#type-declaration" }, + { + "include": "#field-declaration" + }, { "include": "#punctuation-semicolon" } @@ -611,6 +614,9 @@ { "include": "#type-declaration" }, + { + "include": "#field-declaration" + }, { "include": "#punctuation-semicolon" } @@ -666,8 +672,50 @@ } ] }, + "field-declaration": { + "begin": "(?=(?:\\b(?:new|public|protected|internal|private|static|readonly|volatile|const)\\b)*(?:[_$[:alnum:]\\.\\*\\[\\]<>,\\s]+?)\\s+(?:[_$[:alpha:]][_$[:alnum:]]*)\\s*(?:;|=))", + "end": "(?=;)", + "patterns": [ + { + "include": "#comment" + }, + { + "match": "\\b((?:(?:new|public|protected|internal|private|static|readonly|volatile|const)\\s+)*)\\s*([_$[:alnum:]\\.\\*\\[\\]<>,\\s]+?)\\s*([_$[:alpha:]][_$[:alnum:]]*)\\s*(?=;|=)", + "captures": { + "1": { + "patterns": [ + { + "match": "\\b(new|public|protected|internal|private|static|readonly|volatile|const)\\b", + "captures": { + "1": { + "name": "storage.modifier.cs" + } + } + } + ] + }, + "2": { + "patterns": [ + { + "include": "#type" + } + ] + }, + "3": { + "name": "entity.name.variable.field.cs" + } + } + }, + { + "include": "#variable-initializer" + }, + { + "include": "#expression-body" + } + ] + }, "variable-initializer": { - "begin": "(?)", "beginCaptures": { "1": { "name": "keyword.operator.assignment.cs" @@ -680,6 +728,20 @@ } ] }, + "expression-body": { + "begin": "(=>)", + "beginCaptures": { + "1": { + "name": "keyword.operator.arrow.cs" + } + }, + "end": "(?=[,\\);}])", + "patterns": [ + { + "include": "#expression" + } + ] + }, "expression": { "patterns": [ { diff --git a/test/syntaxes/field.test.syntax.ts b/test/syntaxes/field.test.syntax.ts deleted file mode 100644 index dd00ef393b..0000000000 --- a/test/syntaxes/field.test.syntax.ts +++ /dev/null @@ -1,141 +0,0 @@ 
-/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ - -import { should } from 'chai'; -import { Tokens } from './utils/tokenizer'; -import { TokenizerUtil } from'./utils/tokenizerUtil'; - -describe("Grammar", function() { - before(function() { - should(); - }); - - describe("Field", function() { - it("declaration", function() { - -const input = ` -public class Tester -{ - private List _field; - private List field; - private List field123; -}`; - - let tokens = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.StorageModifierKeyword("private", 4, 5)); - tokens.should.contain(Tokens.Type("List", 4, 13)); - tokens.should.contain(Tokens.FieldIdentifier("_field", 4, 18)); - - tokens.should.contain(Tokens.FieldIdentifier("field", 5, 18)); - tokens.should.contain(Tokens.FieldIdentifier("field123", 6, 18)); - }); - - it("generic", function () { - - const input = ` -public class Tester -{ - private Dictionary< List, Dictionary> _field; -}`; - - let tokens = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.StorageModifierKeyword("private", 4, 5)); - tokens.should.contain(Tokens.Type("Dictionary", 4, 13)); - tokens.should.contain(Tokens.Type("List", 4, 25)); - tokens.should.contain(Tokens.Type("Dictionary", 4, 34)); - tokens.should.contain(Tokens.FieldIdentifier("_field", 4, 52)); - }); - - - it("modifiers", function() { - -const input = ` -public class Tester -{ - private static readonly List _field; - readonly string _field2; - string _field3; -}`; - - let tokens = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.StorageModifierKeyword("private", 4, 5)); - tokens.should.contain(Tokens.StorageModifierKeyword("static", 4, 13)); - 
tokens.should.contain(Tokens.StorageModifierKeyword("readonly", 4, 20)); - tokens.should.contain(Tokens.Type("List", 4, 29)); - tokens.should.contain(Tokens.FieldIdentifier("_field", 4, 34)); - - tokens.should.contain(Tokens.FieldIdentifier("_field2", 5, 21)); - - tokens.should.contain(Tokens.FieldIdentifier("_field3", 6, 12)); - }); - - it("types", function() { - -const input = ` -public class Tester -{ - string field123; - string[] field123; -}`; - - let tokens = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.Type("string", 4, 5)); - tokens.should.contain(Tokens.FieldIdentifier("field123", 4, 12)); - - tokens.should.contain(Tokens.Type("string[]", 5, 5)); - tokens.should.contain(Tokens.FieldIdentifier("field123", 5, 14)); - }); - - it("assignment", function() { - -const input = ` -public class Tester -{ - private string field = "hello"; - const bool field = true; -}`; - - let tokens = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.StorageModifierKeyword("private", 4, 5)); - tokens.should.contain(Tokens.Type("string", 4, 13)); - tokens.should.contain(Tokens.FieldIdentifier("field", 4, 20)); - tokens.should.contain(Tokens.StringDoubleQuoted("hello", 4, 29)); - - tokens.should.contain(Tokens.StorageModifierKeyword("const", 5, 5)); - tokens.should.contain(Tokens.Type("bool", 5, 13)); - tokens.should.contain(Tokens.FieldIdentifier("field", 5, 20)); - tokens.should.contain(Tokens.LanguageConstant("true", 5, 28)); - }); - - it("expression body", function() { - -const input = ` -public class Tester -{ - private string field => "hello"; - const bool field => true; -}`; - - let tokens = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.StorageModifierKeyword("private", 4, 5)); - tokens.should.contain(Tokens.Type("string", 4, 13)); - tokens.should.contain(Tokens.FieldIdentifier("field", 4, 20)); - tokens.should.contain(Tokens.StringDoubleQuoted("hello", 4, 30)); - - tokens.should.contain(Tokens.StorageModifierKeyword("const", 
5, 5)); - tokens.should.contain(Tokens.Type("bool", 5, 13)); - tokens.should.contain(Tokens.FieldIdentifier("field", 5, 20)); - tokens.should.contain(Tokens.LanguageConstant("true", 5, 29)); - }); - }); -}); - - diff --git a/test/syntaxes/fields.test.syntax.ts b/test/syntaxes/fields.test.syntax.ts new file mode 100644 index 0000000000..3470203908 --- /dev/null +++ b/test/syntaxes/fields.test.syntax.ts @@ -0,0 +1,231 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. + *--------------------------------------------------------------------------------------------*/ + +import { should } from 'chai'; +import { Tokens } from './utils/tokenizer'; +import { TokenizerUtil } from './utils/tokenizerUtil'; + +describe("Grammar", () => { + before(() => should()); + + describe("Field", function () { + it("declaration", function () { + + const input = ` +public class Tester +{ + private List _field; + private List field; + private List field123; +}`; + + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Modifiers.Public(2, 1), + Tokens.Keywords.Class(2, 8), + Tokens.Identifiers.ClassName("Tester", 2, 14), + Tokens.Puncuation.CurlyBrace.Open(3, 1), + + Tokens.Keywords.Modifiers.Private(4, 5), + Tokens.Type("List", 4, 13), + Tokens.Identifiers.FieldName("_field", 4, 18), + Tokens.Puncuation.Semicolon(4, 24), + + Tokens.Keywords.Modifiers.Private(5, 5), + Tokens.Type("List", 5, 13), + Tokens.Identifiers.FieldName("field", 5, 18), + Tokens.Puncuation.Semicolon(5, 23), + + Tokens.Keywords.Modifiers.Private(6, 5), + Tokens.Type("List", 6, 13), + Tokens.Identifiers.FieldName("field123", 6, 18), + Tokens.Puncuation.Semicolon(6, 26), + + Tokens.Puncuation.CurlyBrace.Close(7, 1)]); + }); + + it("generic", () => { + + const input = ` +public 
class Tester +{ + private Dictionary< List, Dictionary> _field; +}`; + + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Modifiers.Public(2, 1), + Tokens.Keywords.Class(2, 8), + Tokens.Identifiers.ClassName("Tester", 2, 14), + Tokens.Puncuation.CurlyBrace.Open(3, 1), + + Tokens.Keywords.Modifiers.Private(4, 5), + Tokens.Type("Dictionary", 4, 13), + Tokens.Puncuation.TypeParameters.Begin(4, 23), + Tokens.Type("List", 4, 25), + Tokens.Puncuation.TypeParameters.Begin(4, 29), + Tokens.Type("T", 4, 30), + Tokens.Puncuation.TypeParameters.End(4, 31), + Tokens.Puncuation.Comma(4, 32), + Tokens.Type("Dictionary", 4, 34), + Tokens.Puncuation.TypeParameters.Begin(4, 44), + Tokens.Type("T", 4, 45), + Tokens.Puncuation.Comma(4, 46), + Tokens.Type("D", 4, 48), + Tokens.Puncuation.TypeParameters.End(4, 49), + Tokens.Puncuation.TypeParameters.End(4, 50), + Tokens.Identifiers.FieldName("_field", 4, 52), + Tokens.Puncuation.Semicolon(4, 58), + + Tokens.Puncuation.CurlyBrace.Close(5, 1)]); + }); + + + it("modifiers", () => { + + const input = ` +public class Tester +{ + private static readonly List _field; + readonly string _field2; + string _field3; +}`; + + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Modifiers.Public(2, 1), + Tokens.Keywords.Class(2, 8), + Tokens.Identifiers.ClassName("Tester", 2, 14), + Tokens.Puncuation.CurlyBrace.Open(3, 1), + + Tokens.Keywords.Modifiers.Private(4, 5), + Tokens.Keywords.Modifiers.Static(4, 13), + Tokens.Keywords.Modifiers.ReadOnly(4, 20), + Tokens.Type("List", 4, 29), + Tokens.Identifiers.FieldName("_field", 4, 34), + Tokens.Puncuation.Semicolon(4, 40), + + Tokens.Keywords.Modifiers.ReadOnly(5, 5), + Tokens.Type("string", 5, 14), + Tokens.Identifiers.FieldName("_field2", 5, 21), + Tokens.Puncuation.Semicolon(5, 28), + + Tokens.Type("string", 6, 5), + Tokens.Identifiers.FieldName("_field3", 6, 12), + Tokens.Puncuation.Semicolon(6, 19), + + 
Tokens.Puncuation.CurlyBrace.Close(7, 1)]); + }); + + it("types", () => { + + const input = ` +public class Tester +{ + string field123; + string[] field123; +}`; + + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Modifiers.Public(2, 1), + Tokens.Keywords.Class(2, 8), + Tokens.Identifiers.ClassName("Tester", 2, 14), + Tokens.Puncuation.CurlyBrace.Open(3, 1), + + Tokens.Type("string", 4, 5), + Tokens.Identifiers.FieldName("field123", 4, 12), + Tokens.Puncuation.Semicolon(4, 20), + + Tokens.Type("string", 5, 5), + Tokens.Puncuation.SquareBracket.Open(5, 11), + Tokens.Puncuation.SquareBracket.Close(5, 12), + Tokens.Identifiers.FieldName("field123", 5, 14), + Tokens.Puncuation.Semicolon(5, 22), + + Tokens.Puncuation.CurlyBrace.Close(6, 1)]); + }); + + it("assignment", () => { + + const input = ` +public class Tester +{ + private string field = "hello"; + const bool field = true; +}`; + + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Modifiers.Public(2, 1), + Tokens.Keywords.Class(2, 8), + Tokens.Identifiers.ClassName("Tester", 2, 14), + Tokens.Puncuation.CurlyBrace.Open(3, 1), + + Tokens.Keywords.Modifiers.Private(4, 5), + Tokens.Type("string", 4, 13), + Tokens.Identifiers.FieldName("field", 4, 20), + Tokens.Operators.Assignment(4, 26), + Tokens.Puncuation.String.Begin(4, 28), + Tokens.Literals.String("hello", 4, 29), + Tokens.Puncuation.String.End(4, 34), + Tokens.Puncuation.Semicolon(4, 35), + + Tokens.Keywords.Modifiers.Const(5, 5), + Tokens.Type("bool", 5, 13), + Tokens.Identifiers.FieldName("field", 5, 20), + Tokens.Operators.Assignment(5, 26), + Tokens.Literals.Boolean.True(5, 28), + Tokens.Puncuation.Semicolon(5, 32), + + Tokens.Puncuation.CurlyBrace.Close(6, 1)]); + }); + + it("expression body", () => { + + // TODO: Make this a property rather a field. Also, 'const' isn't legal since this is actually a property. 
+ + const input = ` +public class Tester +{ + private string field => "hello"; + const bool field => true; +}`; + + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Modifiers.Public(2, 1), + Tokens.Keywords.Class(2, 8), + Tokens.Identifiers.ClassName("Tester", 2, 14), + Tokens.Puncuation.CurlyBrace.Open(3, 1), + + Tokens.Keywords.Modifiers.Private(4, 5), + Tokens.Type("string", 4, 13), + Tokens.Identifiers.FieldName("field", 4, 20), + Tokens.Operators.Arrow(4, 26), + Tokens.Puncuation.String.Begin(4, 29), + Tokens.Literals.String("hello", 4, 30), + Tokens.Puncuation.String.End(4, 35), + Tokens.Puncuation.Semicolon(4, 36), + + Tokens.Keywords.Modifiers.Const(5, 5), + Tokens.Type("bool", 5, 13), + Tokens.Identifiers.FieldName("field", 5, 20), + Tokens.Operators.Arrow(5, 26), + Tokens.Literals.Boolean.True(5, 29), + Tokens.Puncuation.Semicolon(5, 33), + + Tokens.Puncuation.CurlyBrace.Close(6, 1)]); + }); + }); +}); + + diff --git a/test/syntaxes/utils/tokenizer.ts b/test/syntaxes/utils/tokenizer.ts index ec8ff9ee63..f3089bac8c 100644 --- a/test/syntaxes/utils/tokenizer.ts +++ b/test/syntaxes/utils/tokenizer.ts @@ -9,13 +9,13 @@ export class Tokenizer { private _registry: Registry; private _grammar: IGrammar; - private static readonly _excludedTypes: string[] = [ 'source.cs', 'meta.type.parameters.cs' ]; + private static readonly _excludedTypes: string[] = ['source.cs', 'meta.type.parameters.cs']; constructor(grammarFilePath: string) { this._grammar = new Registry().loadGrammarFromPathSync(grammarFilePath); } - public tokenize(input: string): Token[] { + public tokenize(input: string, excludeTypes?: boolean): Token[] { let tokens: Token[] = []; // ensure consistent line-endings irrelevant of OS @@ -35,7 +35,7 @@ export class Tokenizer { const text = line.substring(token.startIndex, token.endIndex); const type: string = token.scopes[token.scopes.length - 1]; - if (Tokenizer._excludedTypes.indexOf(type) < 0) { + if 
(excludeTypes === false || Tokenizer._excludedTypes.indexOf(type) < 0) { tokens.push(new Token(text, type, lineIndex + 1, token.startIndex + 1)); } } @@ -102,6 +102,9 @@ export namespace Tokens { export const EnumName = (text: string, line?: number, column?: number) => createToken(text, 'entity.name.type.enum.cs', line, column); + export const FieldName = (text: string, line?: number, column?: number) => + createToken(text, 'entity.name.variable.field.cs', line, column); + export const InterfaceName = (text: string, line?: number, column?: number) => createToken(text, 'entity.name.type.interface.cs', line, column); @@ -120,6 +123,9 @@ export namespace Tokens { export const Abstract = (line?: number, column?: number) => createToken('abstract', 'storage.modifier.cs', line, column); + export const Const = (line?: number, column?: number) => + createToken('const', 'storage.modifier.cs', line, column); + export const Internal = (line?: number, column?: number) => createToken('internal', 'storage.modifier.cs', line, column); @@ -144,6 +150,9 @@ export namespace Tokens { export const Public = (line?: number, column?: number) => createToken('public', 'storage.modifier.cs', line, column); + export const ReadOnly = (line?: number, column?: number) => + createToken('readonly', 'storage.modifier.cs', line, column); + export const Ref = (line?: number, column?: number) => createToken('ref', 'storage.modifier.cs', line, column); @@ -222,6 +231,9 @@ export namespace Tokens { } export namespace Operators { + export const Arrow = (line?: number, column?: number) => + createToken('=>', 'keyword.operator.arrow.cs', line, column); + export const Assignment = (line?: number, column?: number) => createToken('=', 'keyword.operator.assignment.cs', line, column); } @@ -300,9 +312,6 @@ export namespace Tokens { export const Keyword = (text: string, line?: number, column?: number) => createToken(text, 'keyword.other.cs', line, column); - export const FieldIdentifier = (text: string, line?: 
number, column?: number) => - createToken(text, 'entity.name.variable.cs', line, column); - export const StringDoubleQuoted = (text: string, line?: number, column?: number) => createToken(text, 'string.quoted.double.cs', line, column); @@ -312,9 +321,6 @@ export namespace Tokens { export const EventIdentifier = (text: string, line?: number, column?: number) => createToken(text, 'entity.name.variable.cs', line, column); - export const LanguageConstant = (text: string, line?: number, column?: number) => - createToken(text, 'constant.language.cs', line, column); - export const PropertyIdentifier = (text: string, line?: number, column?: number) => createToken(text, 'entity.name.function.cs', line, column); diff --git a/test/syntaxes/utils/tokenizerUtil.ts b/test/syntaxes/utils/tokenizerUtil.ts index 36352e0e78..9928808447 100644 --- a/test/syntaxes/utils/tokenizerUtil.ts +++ b/test/syntaxes/utils/tokenizerUtil.ts @@ -10,11 +10,11 @@ export class TokenizerUtil private static _tokenizer: Tokenizer = new Tokenizer("syntaxes/csharp.json"); private static _tokenizer2: Tokenizer = new Tokenizer("syntaxes/csharp2.json"); - public static tokenize(input: string): Token[] { - return TokenizerUtil._tokenizer.tokenize(input); + public static tokenize(input: string, excludeTypes: boolean = true): Token[] { + return TokenizerUtil._tokenizer.tokenize(input, excludeTypes); } - public static tokenize2(input: string): Token[] { - return TokenizerUtil._tokenizer2.tokenize(input); + public static tokenize2(input: string, excludeTypes: boolean = true): Token[] { + return TokenizerUtil._tokenizer2.tokenize(input, excludeTypes); } } From c9e65f5df44002463f8f203948bf7a09c92936af Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Tue, 27 Dec 2016 01:01:48 -0800 Subject: [PATCH 022/192] Add property declarations --- syntaxes/csharp2.json | 179 ++++++++++++++- test/syntaxes/fields.test.syntax.ts | 38 ---- test/syntaxes/properties.test.syntax.ts | 291 ++++++++++++++++++++++++ 
test/syntaxes/property.test.syntax.ts | 142 ------------ test/syntaxes/utils/tokenizer.ts | 14 +- 5 files changed, 474 insertions(+), 190 deletions(-) create mode 100644 test/syntaxes/properties.test.syntax.ts delete mode 100644 test/syntaxes/property.test.syntax.ts diff --git a/syntaxes/csharp2.json b/syntaxes/csharp2.json index 751b3f3daf..266ca921b6 100644 --- a/syntaxes/csharp2.json +++ b/syntaxes/csharp2.json @@ -182,7 +182,7 @@ "begin": "([_$[:alpha:]][_$[:alnum:]]*)\\s*(?==)", "beginCaptures": { "1": { - "name": "entity.name.function.cs" + "name": "entity.name.variable.property.cs" } }, "end": "(?=(,|\\)))", @@ -339,6 +339,12 @@ { "include": "#type-declaration" }, + { + "include": "#property-declaration" + }, + { + "include": "#variable-initializer" + }, { "include": "#field-declaration" }, @@ -614,6 +620,12 @@ { "include": "#type-declaration" }, + { + "include": "#property-declaration" + }, + { + "include": "#variable-initializer" + }, { "include": "#field-declaration" }, @@ -673,7 +685,7 @@ ] }, "field-declaration": { - "begin": "(?=(?:\\b(?:new|public|protected|internal|private|static|readonly|volatile|const)\\b)*(?:[_$[:alnum:]\\.\\*\\[\\]<>,\\s]+?)\\s+(?:[_$[:alpha:]][_$[:alnum:]]*)\\s*(?:;|=))", + "begin": "(?=(?:\\b(?:new|public|protected|internal|private|static|readonly|volatile|const)\\b)*(?:[_$[:alnum:]\\.\\*\\[\\]<>,\\s]+?)\\s+(?:[_$[:alpha:]][_$[:alnum:]]*)\\s*(?!=>)(?:;|=))", "end": "(?=;)", "patterns": [ { @@ -708,9 +720,146 @@ }, { "include": "#variable-initializer" + } + ] + }, + "property-declaration": { + "begin": "(?=(?!.*\\b(?:class|interface|struct|enum)\\b)(?:\\b(?:new|public|protected|internal|private|static|readonly|volatile|const)\\b)*(?:[_$[:alnum:]\\.\\*\\[\\]<>,\\s]+?)\\s+(?:[_$[:alpha:]][_$[:alnum:]]*)\\s*(?:\\{|=>|$))", + "end": "(?=\\}|;)", + "patterns": [ + { + "include": "#comment" + }, + { + "match": 
"\\b((?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\s+)*)\\s*([_$[:alnum:]\\.\\*\\[\\]<>,\\s]+?)\\s*([_$[:alpha:]][_$[:alnum:]]*)\\s*(?=(\\{|=>|$))", + "captures": { + "1": { + "patterns": [ + { + "match": "\\b(new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\b", + "captures": { + "1": { + "name": "storage.modifier.cs" + } + } + } + ] + }, + "2": { + "patterns": [ + { + "include": "#type" + } + ] + }, + "3": { + "name": "entity.name.variable.property.cs" + } + } + }, + { + "include": "#property-accessors" }, { "include": "#expression-body" + }, + { + "include": "#variable-initializer" + } + ] + }, + "property-accessors": { + "begin": "\\{", + "beginCaptures": { + "0": { + "name": "punctuation.curlybrace.open.cs" + } + }, + "end": "\\}", + "endCaptures": { + "0": { + "name": "punctuation.curlybrace.close.cs" + } + }, + "patterns": [ + { + "match": "\\b((?:(?:private|protected|internal)\\s+)*)\\b(get|set)\\s*(;)", + "captures": { + "1": { + "patterns": [ + { + "match": "\\b(private|protected|internal)\\b", + "captures": { + "1": { + "name": "storage.modifier.cs" + } + } + } + ] + }, + "2": { + "patterns": [ + { + "match": "get", + "name": "keyword.other.get.cs" + }, + { + "match": "set", + "name": "keyword.other.set.cs" + } + ] + }, + "3": { + "patterns": [ + { + "include": "#punctuation-semicolon" + } + ] + } + } + }, + { + "begin": "\\b((?:(?:private|protected|internal)\\s+)*)\\b(get|set)\\b\\s*(\\{)", + "beginCaptures": { + "1": { + "patterns": [ + { + "match": "\\b(private|protected|internal)\\b", + "captures": { + "1": { + "name": "storage.modifier.cs" + } + } + } + ] + }, + "2": { + "patterns": [ + { + "match": "get", + "name": "keyword.other.get.cs" + }, + { + "match": "set", + "name": "keyword.other.set.cs" + } + ] + }, + "3": { + "name": "punctuation.curlybrace.open.cs" + } + }, + "end": "\\}", + "endCaptures": { + "0": { + "name": "punctuation.curlybrace.close.cs" + } + }, + 
"patterns": [ + { + "include": "#punctuation-semicolon" + } + ] } ] }, @@ -746,6 +895,9 @@ "patterns": [ { "include": "#literal" + }, + { + "include": "#object-creation-expression" } ] }, @@ -815,7 +967,28 @@ } ] }, - "parenthesized-parameter-list": { + "object-creation-expression": { + "begin": "(new)\\s+([_$[:alnum:]\\.\\*\\[\\]<>,\\s]+?)\\s*(?=\\()", + "beginCaptures": { + "1": { + "name": "keyword.other.new.cs" + }, + "2": { + "patterns": [ + { + "include": "#type" + } + ] + } + }, + "end": "(?<=\\))", + "patterns": [ + { + "include": "#parenthesized-parameter-list" + } + ] + }, + "parenthesized-parameter-list": { "begin": "(?=(\\())", "beginCaptures": { "1": { diff --git a/test/syntaxes/fields.test.syntax.ts b/test/syntaxes/fields.test.syntax.ts index 3470203908..f27f580547 100644 --- a/test/syntaxes/fields.test.syntax.ts +++ b/test/syntaxes/fields.test.syntax.ts @@ -187,44 +187,6 @@ public class Tester Tokens.Puncuation.CurlyBrace.Close(6, 1)]); }); - - it("expression body", () => { - - // TODO: Make this a property rather a field. Also, 'const' isn't legal since this is actually a property. 
- - const input = ` -public class Tester -{ - private string field => "hello"; - const bool field => true; -}`; - - let tokens = TokenizerUtil.tokenize2(input); - - tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public(2, 1), - Tokens.Keywords.Class(2, 8), - Tokens.Identifiers.ClassName("Tester", 2, 14), - Tokens.Puncuation.CurlyBrace.Open(3, 1), - - Tokens.Keywords.Modifiers.Private(4, 5), - Tokens.Type("string", 4, 13), - Tokens.Identifiers.FieldName("field", 4, 20), - Tokens.Operators.Arrow(4, 26), - Tokens.Puncuation.String.Begin(4, 29), - Tokens.Literals.String("hello", 4, 30), - Tokens.Puncuation.String.End(4, 35), - Tokens.Puncuation.Semicolon(4, 36), - - Tokens.Keywords.Modifiers.Const(5, 5), - Tokens.Type("bool", 5, 13), - Tokens.Identifiers.FieldName("field", 5, 20), - Tokens.Operators.Arrow(5, 26), - Tokens.Literals.Boolean.True(5, 29), - Tokens.Puncuation.Semicolon(5, 33), - - Tokens.Puncuation.CurlyBrace.Close(6, 1)]); - }); }); }); diff --git a/test/syntaxes/properties.test.syntax.ts b/test/syntaxes/properties.test.syntax.ts new file mode 100644 index 0000000000..bb8cc3fb64 --- /dev/null +++ b/test/syntaxes/properties.test.syntax.ts @@ -0,0 +1,291 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. 
+ *--------------------------------------------------------------------------------------------*/ + +import { should } from 'chai'; +import { Tokens } from './utils/tokenizer'; +import { TokenizerUtil } from './utils/tokenizerUtil'; + +describe("Grammar", () => { + before(() => should()); + + describe("Property", () => { + it("declaration", () => { + + const input = ` +class Tester +{ + public IBooom Property + { + get { return null; } + set { something = value; } + } +}`; + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Class(2, 1), + Tokens.Identifiers.ClassName("Tester", 2, 7), + Tokens.Puncuation.CurlyBrace.Open(3, 1), + + Tokens.Keywords.Modifiers.Public(4, 5), + Tokens.Type("IBooom", 4, 12), + Tokens.Identifiers.PropertyName("Property", 4, 19), + Tokens.Puncuation.CurlyBrace.Open(5, 5), + Tokens.Keywords.Get(6, 9), + Tokens.Puncuation.CurlyBrace.Open(6, 13), + Tokens.Puncuation.Semicolon(6, 26), + Tokens.Puncuation.CurlyBrace.Close(6, 28), + Tokens.Keywords.Set(7, 9), + Tokens.Puncuation.CurlyBrace.Open(7, 13), + Tokens.Puncuation.Semicolon(7, 32), + Tokens.Puncuation.CurlyBrace.Close(7, 34), + Tokens.Puncuation.CurlyBrace.Close(8, 5), + + Tokens.Puncuation.CurlyBrace.Close(9, 1)]); + }); + + it("declaration single line", () => { + + const input = ` +class Tester +{ + public IBooom Property { get { return null; } private set { something = value; } } +}`; + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Class(2, 1), + Tokens.Identifiers.ClassName("Tester", 2, 7), + Tokens.Puncuation.CurlyBrace.Open(3, 1), + + Tokens.Keywords.Modifiers.Public(4, 5), + Tokens.Type("IBooom", 4, 12), + Tokens.Identifiers.PropertyName("Property", 4, 19), + Tokens.Puncuation.CurlyBrace.Open(4, 28), + Tokens.Keywords.Get(4, 30), + Tokens.Puncuation.CurlyBrace.Open(4, 34), + Tokens.Puncuation.Semicolon(4, 47), + Tokens.Puncuation.CurlyBrace.Close(4, 49), + Tokens.Keywords.Modifiers.Private(4, 
51), + Tokens.Keywords.Set(4, 59), + Tokens.Puncuation.CurlyBrace.Open(4, 63), + Tokens.Puncuation.Semicolon(4, 82), + Tokens.Puncuation.CurlyBrace.Close(4, 84), + Tokens.Puncuation.CurlyBrace.Close(4, 86), + + Tokens.Puncuation.CurlyBrace.Close(5, 1)]); + }); + + + it("declaration without modifiers", () => { + + const input = ` +class Tester +{ + IBooom Property {get; set;} +}`; + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Class(2, 1), + Tokens.Identifiers.ClassName("Tester", 2, 7), + Tokens.Puncuation.CurlyBrace.Open(3, 1), + + Tokens.Type("IBooom", 4, 5), + Tokens.Identifiers.PropertyName("Property", 4, 12), + Tokens.Puncuation.CurlyBrace.Open(4, 21), + Tokens.Keywords.Get(4, 22), + Tokens.Puncuation.Semicolon(4, 25), + Tokens.Keywords.Set(4, 27), + Tokens.Puncuation.Semicolon(4, 30), + Tokens.Puncuation.CurlyBrace.Close(4, 31), + + Tokens.Puncuation.CurlyBrace.Close(5, 1)]); + }); + + it("auto-property single line", function () { + + const input = ` +class Tester +{ + public IBooom Property { get; set; } +}`; + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Class(2, 1), + Tokens.Identifiers.ClassName("Tester", 2, 7), + Tokens.Puncuation.CurlyBrace.Open(3, 1), + + Tokens.Keywords.Modifiers.Public(4, 5), + Tokens.Type("IBooom", 4, 12), + Tokens.Identifiers.PropertyName("Property", 4, 19), + Tokens.Puncuation.CurlyBrace.Open(4, 28), + Tokens.Keywords.Get(4, 30), + Tokens.Puncuation.Semicolon(4, 33), + Tokens.Keywords.Set(4, 35), + Tokens.Puncuation.Semicolon(4, 38), + Tokens.Puncuation.CurlyBrace.Close(4, 40), + + Tokens.Puncuation.CurlyBrace.Close(5, 1)]); + }); + + it("auto-property", () => { + + const input = ` +class Tester +{ + public IBooom Property + { + get; + set; + } +}`; + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Class(2, 1), + Tokens.Identifiers.ClassName("Tester", 2, 7), + 
Tokens.Puncuation.CurlyBrace.Open(3, 1), + + Tokens.Keywords.Modifiers.Public(4, 5), + Tokens.Type("IBooom", 4, 12), + Tokens.Identifiers.PropertyName("Property", 4, 19), + Tokens.Puncuation.CurlyBrace.Open(5, 5), + Tokens.Keywords.Get(6, 9), + Tokens.Puncuation.Semicolon(6, 12), + Tokens.Keywords.Set(7, 9), + Tokens.Puncuation.Semicolon(7, 12), + Tokens.Puncuation.CurlyBrace.Close(8, 5), + + Tokens.Puncuation.CurlyBrace.Close(9, 1)]); + }); + + it("generic auto-property", () => { + + const input = ` +class Tester +{ + public Dictionary[]> Property { get; set; } +}`; + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Class(2, 1), + Tokens.Identifiers.ClassName("Tester", 2, 7), + Tokens.Puncuation.CurlyBrace.Open(3, 1), + + Tokens.Keywords.Modifiers.Public(4, 5), + Tokens.Type("Dictionary", 4, 12), + Tokens.Puncuation.TypeParameters.Begin(4, 22), + Tokens.Type("string", 4, 23), + Tokens.Puncuation.Comma(4, 29), + Tokens.Type("List", 4, 31), + Tokens.Puncuation.TypeParameters.Begin(4, 35), + Tokens.Type("T", 4, 36), + Tokens.Puncuation.TypeParameters.End(4, 37), + Tokens.Puncuation.SquareBracket.Open(4, 38), + Tokens.Puncuation.SquareBracket.Close(4, 39), + Tokens.Puncuation.TypeParameters.End(4, 40), + Tokens.Identifiers.PropertyName("Property", 4, 42), + Tokens.Puncuation.CurlyBrace.Open(4, 51), + Tokens.Keywords.Get(4, 53), + Tokens.Puncuation.Semicolon(4, 56), + Tokens.Keywords.Set(4, 58), + Tokens.Puncuation.Semicolon(4, 61), + Tokens.Puncuation.CurlyBrace.Close(4, 63), + + Tokens.Puncuation.CurlyBrace.Close(5, 1)]); + }); + + it("auto-property initializer", () => { + + const input = ` +class Tester +{ + public Dictionary[]> Property { get; } = new Dictionary[]>(); +}`; + + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Class(2, 1), + Tokens.Identifiers.ClassName("Tester", 2, 7), + Tokens.Puncuation.CurlyBrace.Open(3, 1), + + Tokens.Keywords.Modifiers.Public(4, 5), + 
Tokens.Type("Dictionary", 4, 12), + Tokens.Puncuation.TypeParameters.Begin(4, 22), + Tokens.Type("string", 4, 23), + Tokens.Puncuation.Comma(4, 29), + Tokens.Type("List", 4, 31), + Tokens.Puncuation.TypeParameters.Begin(4, 35), + Tokens.Type("T", 4, 36), + Tokens.Puncuation.TypeParameters.End(4, 37), + Tokens.Puncuation.SquareBracket.Open(4, 38), + Tokens.Puncuation.SquareBracket.Close(4, 39), + Tokens.Puncuation.TypeParameters.End(4, 40), + Tokens.Identifiers.PropertyName("Property", 4, 42), + Tokens.Puncuation.CurlyBrace.Open(4, 51), + Tokens.Keywords.Get(4, 53), + Tokens.Puncuation.Semicolon(4, 56), + Tokens.Puncuation.CurlyBrace.Close(4, 58), + Tokens.Operators.Assignment(4, 60), + Tokens.Keywords.New(4, 62), + Tokens.Type("Dictionary", 4, 66), + Tokens.Puncuation.TypeParameters.Begin(4, 76), + Tokens.Type("string", 4, 77), + Tokens.Puncuation.Comma(4, 83), + Tokens.Type("List", 4, 85), + Tokens.Puncuation.TypeParameters.Begin(4, 89), + Tokens.Type("T", 4, 90), + Tokens.Puncuation.TypeParameters.End(4, 91), + Tokens.Puncuation.SquareBracket.Open(4, 92), + Tokens.Puncuation.SquareBracket.Close(4, 93), + Tokens.Puncuation.TypeParameters.End(4, 94), + Tokens.Puncuation.Parenthesis.Open(4, 95), + Tokens.Puncuation.Parenthesis.Close(4, 96), + Tokens.Puncuation.Semicolon(4, 97), + + Tokens.Puncuation.CurlyBrace.Close(5, 1)]); + }); + + it("expression body", () => { + + const input = ` +public class Tester +{ + private string prop1 => "hello"; + private bool prop2 => true; +}`; + + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Modifiers.Public(2, 1), + Tokens.Keywords.Class(2, 8), + Tokens.Identifiers.ClassName("Tester", 2, 14), + Tokens.Puncuation.CurlyBrace.Open(3, 1), + + Tokens.Keywords.Modifiers.Private(4, 5), + Tokens.Type("string", 4, 13), + Tokens.Identifiers.PropertyName("prop1", 4, 20), + Tokens.Operators.Arrow(4, 26), + Tokens.Puncuation.String.Begin(4, 29), + Tokens.Literals.String("hello", 4, 30), + 
Tokens.Puncuation.String.End(4, 35), + Tokens.Puncuation.Semicolon(4, 36), + + Tokens.Keywords.Modifiers.Private(5, 5), + Tokens.Type("bool", 5, 13), + Tokens.Identifiers.PropertyName("prop2", 5, 20), + Tokens.Operators.Arrow(5, 26), + Tokens.Literals.Boolean.True(5, 29), + Tokens.Puncuation.Semicolon(5, 33), + + Tokens.Puncuation.CurlyBrace.Close(6, 1)]); + }); + }); +}); diff --git a/test/syntaxes/property.test.syntax.ts b/test/syntaxes/property.test.syntax.ts deleted file mode 100644 index 788ee7f709..0000000000 --- a/test/syntaxes/property.test.syntax.ts +++ /dev/null @@ -1,142 +0,0 @@ -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ - -import { should } from 'chai'; -import { Tokens } from './utils/tokenizer'; -import { TokenizerUtil } from'./utils/tokenizerUtil'; - -describe("Grammar", function() { - before(function() { - should(); - }); - - describe("Property", function() { - it("declaration", function() { - -const input = ` -class Tester -{ - public IBooom Property - { - get { return null; } - set { something = value; } - } -}`; - let tokens = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.StorageModifierKeyword("public", 4, 5)); - tokens.should.contain(Tokens.Type("IBooom", 4, 12)); - tokens.should.contain(Tokens.PropertyIdentifier("Property", 4, 19)); - tokens.should.contain(Tokens.Keyword("get", 6, 9)); - tokens.should.contain(Tokens.Keyword("set", 7, 9)); - }); - - it("declaration single line", function() { - -const input = ` -class Tester -{ - public IBooom Property { get { return null; } private set { something = value; } } -}`; - let tokens = TokenizerUtil.tokenize(input); - - 
tokens.should.contain(Tokens.StorageModifierKeyword("public", 4, 5)); - tokens.should.contain(Tokens.Type("IBooom", 4, 12)); - tokens.should.contain(Tokens.PropertyIdentifier("Property", 4, 19)); - tokens.should.contain(Tokens.Keyword("get", 4, 30)); - tokens.should.contain(Tokens.StorageModifierKeyword("private", 4, 51)); - tokens.should.contain(Tokens.Keyword("set", 4, 59)); - }); - - - it("declaration without modifiers", function() { - -const input = ` -class Tester -{ - IBooom Property {get; set;} -}`; - let tokens = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.Type("IBooom", 4, 5)); - tokens.should.contain(Tokens.PropertyIdentifier("Property", 4, 12)); - }); - - it("auto-property single line", function() { - -const input = ` -class Tester -{ - public IBooom Property { get; set; } -}`; - let tokens = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.StorageModifierKeyword("public", 4, 5)); - tokens.should.contain(Tokens.Type("IBooom", 4, 12)); - tokens.should.contain(Tokens.PropertyIdentifier("Property", 4, 19)); - tokens.should.contain(Tokens.Keyword("get", 4, 30)); - tokens.should.contain(Tokens.Keyword("set", 4, 35)); - }); - - it("auto-property", function() { - -const input = ` -class Tester -{ - public IBooom Property - { - get; - set; - } -}`; - let tokens = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.StorageModifierKeyword("public", 4, 5)); - tokens.should.contain(Tokens.Type("IBooom", 4, 12)); - tokens.should.contain(Tokens.PropertyIdentifier("Property", 4, 19)); - tokens.should.contain(Tokens.Keyword("get", 6, 9)); - tokens.should.contain(Tokens.Keyword("set", 7, 9)); - }); - - it("generic auto-property", function() { - -const input = ` -class Tester -{ - public Dictionary[]> Property { get; set; } -}`; - let tokens = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.StorageModifierKeyword("public", 4, 5)); - tokens.should.contain(Tokens.Type("Dictionary", 4, 12)); - 
tokens.should.contain(Tokens.Type("string", 4, 23)); - tokens.should.contain(Tokens.Type("List[]", 4, 31)); - tokens.should.contain(Tokens.PropertyIdentifier("Property", 4, 42)); - tokens.should.contain(Tokens.Keyword("get", 4, 53)); - tokens.should.contain(Tokens.Keyword("set", 4, 58)); - }); - - it("auto-property initializer", function() { - -const input = ` -class Tester -{ - public Dictionary[]> Property { get; } = new Dictionary[]>(); -}`; - - let tokens = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.StorageModifierKeyword("public", 4, 5)); - tokens.should.contain(Tokens.Type("Dictionary", 4, 12)); - tokens.should.contain(Tokens.Type("string", 4, 23)); - tokens.should.contain(Tokens.Type("List[]", 4, 31)); - tokens.should.contain(Tokens.PropertyIdentifier("Property", 4, 42)); - tokens.should.contain(Tokens.Keyword("get", 4, 53)); - tokens.should.contain(Tokens.StorageModifierKeyword("new", 4, 62)); - }); - }); -}); - - diff --git a/test/syntaxes/utils/tokenizer.ts b/test/syntaxes/utils/tokenizer.ts index f3089bac8c..b94a65369f 100644 --- a/test/syntaxes/utils/tokenizer.ts +++ b/test/syntaxes/utils/tokenizer.ts @@ -112,7 +112,7 @@ export namespace Tokens { createToken(text, 'entity.name.type.namespace.cs', line, column); export const PropertyName = (text: string, line?: number, column?: number) => - createToken(text, 'entity.name.function.cs', line, column); + createToken(text, 'entity.name.variable.property.cs', line, column); export const StructName = (text: string, line?: number, column?: number) => createToken(text, 'entity.name.type.struct.cs', line, column); @@ -181,6 +181,9 @@ export namespace Tokens { export const Extern = (line?: number, column?: number) => createToken('extern', 'keyword.other.extern.cs', line, column); + export const Get = (line?: number, column?: number) => + createToken('get', 'keyword.other.get.cs', line, column); + export const Interface = (line?: number, column?: number) => createToken('interface', 
'keyword.other.interface.cs', line, column); @@ -190,6 +193,9 @@ export namespace Tokens { export const New = (line?: number, column?: number) => createToken('new', 'keyword.other.new.cs', line, column); + export const Set = (line?: number, column?: number) => + createToken('set', 'keyword.other.set.cs', line, column); + export const Static = (line?: number, column?: number) => createToken('static', 'keyword.other.static.cs', line, column); @@ -309,9 +315,6 @@ export namespace Tokens { export const Type = (text: string, line?: number, column?: number) => createToken(text, 'storage.type.cs', line, column); - export const Keyword = (text: string, line?: number, column?: number) => - createToken(text, 'keyword.other.cs', line, column); - export const StringDoubleQuoted = (text: string, line?: number, column?: number) => createToken(text, 'string.quoted.double.cs', line, column); @@ -321,9 +324,6 @@ export namespace Tokens { export const EventIdentifier = (text: string, line?: number, column?: number) => createToken(text, 'entity.name.variable.cs', line, column); - export const PropertyIdentifier = (text: string, line?: number, column?: number) => - createToken(text, 'entity.name.function.cs', line, column); - export const StringInterpolatedExpression = (text: string, line?: number, column?: number) => createToken(text, 'meta.interpolated.expression.cs', line, column); From 7a4d60b9c85855f44186fb955a43f64cfe15c6f2 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Tue, 27 Dec 2016 01:19:16 -0800 Subject: [PATCH 023/192] Allow muliple declarators in field declarations --- syntaxes/csharp2.json | 3 +++ test/syntaxes/fields.test.syntax.ts | 33 +++++++++++++++++++++++++++++ 2 files changed, 36 insertions(+) diff --git a/syntaxes/csharp2.json b/syntaxes/csharp2.json index 266ca921b6..791cff4f0a 100644 --- a/syntaxes/csharp2.json +++ b/syntaxes/csharp2.json @@ -710,6 +710,9 @@ "patterns": [ { "include": "#type" + }, + { + "include": "#punctuation-comma" } ] }, diff --git 
a/test/syntaxes/fields.test.syntax.ts b/test/syntaxes/fields.test.syntax.ts index f27f580547..34f0ca6b93 100644 --- a/test/syntaxes/fields.test.syntax.ts +++ b/test/syntaxes/fields.test.syntax.ts @@ -187,6 +187,39 @@ public class Tester Tokens.Puncuation.CurlyBrace.Close(6, 1)]); }); + + it("multiple field declarators", () => { + + const input = ` +public class Tester +{ + int x = 19, y = 23, z = 42; +}`; + + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Modifiers.Public(2, 1), + Tokens.Keywords.Class(2, 8), + Tokens.Identifiers.ClassName("Tester", 2, 14), + Tokens.Puncuation.CurlyBrace.Open(3, 1), + + Tokens.Type("int", 4, 5), + Tokens.Identifiers.FieldName("x", 4, 9), + Tokens.Operators.Assignment(4, 11), + Tokens.Literals.Numeric.Decimal("19", 4, 13), + Tokens.Puncuation.Comma(4, 15), + Tokens.Identifiers.FieldName("y", 4, 17), + Tokens.Operators.Assignment(4, 19), + Tokens.Literals.Numeric.Decimal("23", 4, 21), + Tokens.Puncuation.Comma(4, 23), + Tokens.Identifiers.FieldName("z", 4, 25), + Tokens.Operators.Assignment(4, 27), + Tokens.Literals.Numeric.Decimal("42", 4, 29), + Tokens.Puncuation.Semicolon(4, 31), + + Tokens.Puncuation.CurlyBrace.Close(5, 1)]); + }); }); }); From b18805c21e661cdf6623519ca712a13ee9e0c81d Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Tue, 27 Dec 2016 12:20:20 -0800 Subject: [PATCH 024/192] Add event declarations --- syntaxes/csharp2.json | 301 ++++++++++++++++++------ syntaxes/syntax.md | 41 ++++ test/syntaxes/event.test.syntax.ts | 52 ---- test/syntaxes/events.test.syntax.ts | 142 +++++++++++ test/syntaxes/fields.test.syntax.ts | 2 +- test/syntaxes/properties.test.syntax.ts | 29 ++- test/syntaxes/utils/tokenizer.ts | 18 +- 7 files changed, 451 insertions(+), 134 deletions(-) create mode 100644 syntaxes/syntax.md delete mode 100644 test/syntaxes/event.test.syntax.ts create mode 100644 test/syntaxes/events.test.syntax.ts diff --git a/syntaxes/csharp2.json b/syntaxes/csharp2.json 
index 791cff4f0a..ddc34b686c 100644 --- a/syntaxes/csharp2.json +++ b/syntaxes/csharp2.json @@ -32,6 +32,95 @@ } ] }, + "declarations": { + "patterns": [ + { + "include": "#namespace-declaration" + }, + { + "include": "#type-declarations" + }, + { + "include": "#punctuation-semicolon" + } + ] + }, + "type-declarations": { + "patterns": [ + { + "include": "#class-declaration" + }, + { + "include": "#delegate-declaration" + }, + { + "include": "#enum-declaration" + }, + { + "include": "#interface-declaration" + }, + { + "include": "#struct-declaration" + } + ] + }, + "class-members": { + "patterns": [ + { + "include": "#type-declarations" + }, + { + "include": "#event-declaration" + }, + { + "include": "#property-declaration" + }, + { + "include": "#variable-initializer" + }, + { + "include": "#field-declaration" + }, + { + "include": "#punctuation-semicolon" + } + ] + }, + "struct-members": { + "patterns": [ + { + "include": "#type-declarations" + }, + { + "include": "#event-declaration" + }, + { + "include": "#property-declaration" + }, + { + "include": "#variable-initializer" + }, + { + "include": "#field-declaration" + }, + { + "include": "#punctuation-semicolon" + } + ] + }, + "statements": { + "patterns": [] + }, + "expressions": { + "patterns": [ + { + "include": "#literal" + }, + { + "include": "#object-creation-expression" + } + ] + }, "extern-alias-directive": { "begin": "\\s*(extern)\\b\\s*(alias)\\b\\s*([_$[:alpha:]][_$[:alnum:]]*)", "beginCaptures": { @@ -171,7 +260,7 @@ "include": "#attribute-named-argument" }, { - "include": "#expression" + "include": "#expressions" }, { "include": "#punctuation-comma" @@ -191,20 +280,7 @@ "include": "#operator-assignment" }, { - "include": "#expression" - } - ] - }, - "declarations": { - "patterns": [ - { - "include": "#namespace-declaration" - }, - { - "include": "#type-declaration" - }, - { - "include": "#punctuation-semicolon" + "include": "#expressions" } ] }, @@ -254,25 +330,6 @@ } ] }, - "type-declaration": { - 
"patterns": [ - { - "include": "#class-declaration" - }, - { - "include": "#delegate-declaration" - }, - { - "include": "#enum-declaration" - }, - { - "include": "#interface-declaration" - }, - { - "include": "#struct-declaration" - } - ] - }, "class-declaration": { "begin": "(?=(?:((new|public|protected|internal|private|abstract|sealed|static|partial)\\s+)*)(?:class)\\s+)", "end": "(?<=\\})", @@ -337,19 +394,7 @@ }, "patterns": [ { - "include": "#type-declaration" - }, - { - "include": "#property-declaration" - }, - { - "include": "#variable-initializer" - }, - { - "include": "#field-declaration" - }, - { - "include": "#punctuation-semicolon" + "include": "#class-members" } ] } @@ -618,19 +663,7 @@ }, "patterns": [ { - "include": "#type-declaration" - }, - { - "include": "#property-declaration" - }, - { - "include": "#variable-initializer" - }, - { - "include": "#field-declaration" - }, - { - "include": "#punctuation-semicolon" + "include": "#struct-members" } ] } @@ -685,7 +718,7 @@ ] }, "field-declaration": { - "begin": "(?=(?:\\b(?:new|public|protected|internal|private|static|readonly|volatile|const)\\b)*(?:[_$[:alnum:]\\.\\*\\[\\]<>,\\s]+?)\\s+(?:[_$[:alpha:]][_$[:alnum:]]*)\\s*(?!=>)(?:;|=))", + "begin": "(?!.*\\b(?:event)\\b)(?=(?:\\b(?:new|public|protected|internal|private|static|readonly|volatile|const)\\b)*\\s*(?:[_$[:alnum:]\\.\\*\\[\\]<>,\\s]+?)\\s+(?:[_$[:alpha:]][_$[:alnum:]]*)\\s*(?!=>)(?:;|=))", "end": "(?=;)", "patterns": [ { @@ -727,7 +760,7 @@ ] }, "property-declaration": { - "begin": "(?=(?!.*\\b(?:class|interface|struct|enum)\\b)(?:\\b(?:new|public|protected|internal|private|static|readonly|volatile|const)\\b)*(?:[_$[:alnum:]\\.\\*\\[\\]<>,\\s]+?)\\s+(?:[_$[:alpha:]][_$[:alnum:]]*)\\s*(?:\\{|=>|$))", + "begin": 
"(?!.*\\b(?:class|interface|struct|enum|event)\\b)(?=(?:\\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\b)*\\s*(?:[_$[:alnum:]\\.\\*\\[\\]<>,\\s]+?)\\s+(?:[_$[:alpha:]][_$[:alnum:]]*)\\s*(?:\\{|=>|$))", "end": "(?=\\}|;)", "patterns": [ { @@ -814,6 +847,9 @@ }, "3": { "patterns": [ + { + "include": "#statements" + }, { "include": "#punctuation-semicolon" } @@ -866,6 +902,133 @@ } ] }, + "event-declaration": { + "begin": "(?=\\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\b)*)\\s*\\b(?event)\\b\\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*(?:\\s*,\\s*[_$[:alpha:]][_$[:alnum:]]*)*)\\s*(?:\\{|;|$))", + "end": "(?=\\}|;)", + "patterns": [ + { + "include": "#comment" + }, + { + "match": "\\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\b)*)\\s*\\b(?event)\\b\\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*(?:\\s*,\\s*[_$[:alpha:]][_$[:alnum:]]*)*)\\s*(?=\\{|;|$)", + "captures": { + "1": { + "patterns": [ + { + "match": "\\b(new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\b", + "captures": { + "1": { + "name": "storage.modifier.cs" + } + } + } + ] + }, + "2": { + "name": "keyword.other.event.cs" + }, + "3": { + "patterns": [ + { + "include": "#type" + } + ] + }, + "4": { + "patterns": [ + { + "match": "[_$[:alpha:]][_$[:alnum:]]*", + "name": "entity.name.variable.event.cs" + }, + { + "include": "#punctuation-comma" + } + ] + } + } + }, + { + "include": "#event-accessors" + }, + { + "include": "#punctuation-comma" + } + ] + }, + "event-accessors": { + "begin": "\\{", + "beginCaptures": { + "0": { + "name": 
"punctuation.curlybrace.open.cs" + } + }, + "end": "\\}", + "endCaptures": { + "0": { + "name": "punctuation.curlybrace.close.cs" + } + }, + "patterns": [ + { + "match": "\\b(add|remove)\\s*(;)", + "captures": { + "1": { + "patterns": [ + { + "match": "add", + "name": "keyword.other.add.cs" + }, + { + "match": "remove", + "name": "keyword.other.remove.cs" + } + ] + }, + "2": { + "patterns": [ + { + "include": "#punctuation-semicolon" + } + ] + } + } + }, + { + "begin": "\\b(add|remove)\\b\\s*(\\{)", + "beginCaptures": { + "1": { + "patterns": [ + { + "match": "add", + "name": "keyword.other.add.cs" + }, + { + "match": "remove", + "name": "keyword.other.remove.cs" + } + ] + }, + "2": { + "name": "punctuation.curlybrace.open.cs" + } + }, + "end": "\\}", + "endCaptures": { + "0": { + "name": "punctuation.curlybrace.close.cs" + } + }, + "patterns": [ + { + "include": "#statements" + }, + { + "include": "#punctuation-semicolon" + } + ] + } + ] + }, "variable-initializer": { "begin": "(?)", "beginCaptures": { @@ -876,7 +1039,7 @@ "end": "(?=[,\\);}])", "patterns": [ { - "include": "#expression" + "include": "#expressions" } ] }, @@ -890,17 +1053,7 @@ "end": "(?=[,\\);}])", "patterns": [ { - "include": "#expression" - } - ] - }, - "expression": { - "patterns": [ - { - "include": "#literal" - }, - { - "include": "#object-creation-expression" + "include": "#expressions" } ] }, diff --git a/syntaxes/syntax.md b/syntaxes/syntax.md new file mode 100644 index 0000000000..478c79ae2c --- /dev/null +++ b/syntaxes/syntax.md @@ -0,0 +1,41 @@ +Important regular expressions: + +#### Identifier + +* Expression: `[_$[:alpha:]][_$[:alnum:]]*` +* Matches: `_`, `Ident42` + +#### Dotted name + +* Expression: `([_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)` +* Matches: `System.Collections.Generic.Dictionary` + +#### Generic name + +* Expression: `(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?)` +* 
Matches: `System.Collections.Generic.Dictionary>` + +#### Array suffix + +* Expression: `(?:(?:\[,*\])*)` +* Matches: `[][,][,,]` + +#### Pointer suffix + +* Expression: `(?:(?:\*)*)?` +* Matches: `int*` + +#### Type name + +* Expression: `(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?)` +* Matches: `System.Collections.Generic.Dictionary, System.List>>` + +#### Event declarations + +* Expression: `\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\b)*)\s*\b(?event)\b\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?)\s+(?[_$[:alpha:]][_$[:alnum:]]*(?:\s*,\s*[_$[:alpha:]][_$[:alnum:]]*)*)\s*(?=\{|;|$)` +* Break down: + * Storage modifiers: `\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\b)*)` + * Event keyword: `\s*\b(?event)\b` + * Type name: `\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?)` + * Event name(s): `\s+(?[_$[:alpha:]][_$[:alnum:]]*(?:\s*,\s*[_$[:alpha:]][_$[:alnum:]]*)*)` + * End: `\s*(?=\{|;|$)` \ No newline at end of file diff --git a/test/syntaxes/event.test.syntax.ts b/test/syntaxes/event.test.syntax.ts deleted file mode 100644 index 3e99f3b339..0000000000 --- a/test/syntaxes/event.test.syntax.ts +++ /dev/null @@ -1,52 +0,0 @@ -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. 
- *--------------------------------------------------------------------------------------------*/ - -import { should } from 'chai'; -import { Tokens } from './utils/tokenizer'; -import { TokenizerUtil } from'./utils/tokenizerUtil'; - -describe("Grammar", function() { - before(function() { - should(); - }); - - describe("Event", function() { - it("declaration", function() { - -const input = ` -public class Tester -{ - public event Type Event; -}`; - - let tokens = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.StorageModifierKeyword("public", 4, 5)); - tokens.should.contain(Tokens.StorageModifierKeyword("event", 4, 12)); - tokens.should.contain(Tokens.Type("Type", 4, 18)); - tokens.should.contain(Tokens.EventIdentifier("Event", 4, 23)); - }); - - it("generic", function () { - - const input = ` -public class Tester -{ - public event EventHandler, Dictionary> Event; -}`; - - let tokens = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.StorageModifierKeyword("public", 4, 5)); - tokens.should.contain(Tokens.StorageModifierKeyword("event", 4, 12)); - tokens.should.contain(Tokens.Type("EventHandler", 4, 18)); - tokens.should.contain(Tokens.Type("List", 4, 31)); - tokens.should.contain(Tokens.Type("Dictionary", 4, 40)); - tokens.should.contain(Tokens.EventIdentifier("Event", 4, 58)); - }); - }); -}); - - diff --git a/test/syntaxes/events.test.syntax.ts b/test/syntaxes/events.test.syntax.ts new file mode 100644 index 0000000000..36f415d5dc --- /dev/null +++ b/test/syntaxes/events.test.syntax.ts @@ -0,0 +1,142 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. 
+ *--------------------------------------------------------------------------------------------*/ + +import { should } from 'chai'; +import { Tokens } from './utils/tokenizer'; +import { TokenizerUtil } from './utils/tokenizerUtil'; + +describe("Grammar", () => { + before(() => should()); + + describe("Events", () => { + it("declaration", () => { + + const input = ` +public class Tester +{ + public event Type Event; +}`; + + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Modifiers.Public(2, 1), + Tokens.Keywords.Class(2, 8), + Tokens.Identifiers.ClassName("Tester", 2, 14), + Tokens.Puncuation.CurlyBrace.Open(3, 1), + + Tokens.Keywords.Modifiers.Public(4, 5), + Tokens.Keywords.Event(4, 12), + Tokens.Type("Type", 4, 18), + Tokens.Identifiers.EventName("Event", 4, 23), + Tokens.Puncuation.Semicolon(4, 28), + + Tokens.Puncuation.CurlyBrace.Close(5, 1)]); + }); + + it("declaration with multiple declarators", () => { + + const input = ` +public class Tester +{ + public event Type Event1, Event2; +}`; + + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Modifiers.Public(2, 1), + Tokens.Keywords.Class(2, 8), + Tokens.Identifiers.ClassName("Tester", 2, 14), + Tokens.Puncuation.CurlyBrace.Open(3, 1), + + Tokens.Keywords.Modifiers.Public(4, 5), + Tokens.Keywords.Event(4, 12), + Tokens.Type("Type", 4, 18), + Tokens.Identifiers.EventName("Event1", 4, 23), + Tokens.Puncuation.Comma(4, 29), + Tokens.Identifiers.EventName("Event2", 4, 31), + Tokens.Puncuation.Semicolon(4, 37), + + Tokens.Puncuation.CurlyBrace.Close(5, 1)]); + }); + + it("generic", () => { + + const input = ` +public class Tester +{ + public event EventHandler, Dictionary> Event; +}`; + + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Modifiers.Public(2, 1), + Tokens.Keywords.Class(2, 8), + Tokens.Identifiers.ClassName("Tester", 2, 14), + Tokens.Puncuation.CurlyBrace.Open(3, 1), 
+ + Tokens.Keywords.Modifiers.Public(4, 5), + Tokens.Keywords.Event(4, 12), + Tokens.Type("EventHandler", 4, 18), + Tokens.Puncuation.TypeParameters.Begin(4, 30), + Tokens.Type("List", 4, 31), + Tokens.Puncuation.TypeParameters.Begin(4, 35), + Tokens.Type("T", 4, 36), + Tokens.Puncuation.TypeParameters.End(4, 37), + Tokens.Puncuation.Comma(4, 38), + Tokens.Type("Dictionary", 4, 40), + Tokens.Puncuation.TypeParameters.Begin(4, 50), + Tokens.Type("T", 4, 51), + Tokens.Puncuation.Comma(4, 52), + Tokens.Type("D", 4, 54), + Tokens.Puncuation.TypeParameters.End(4, 55), + Tokens.Puncuation.TypeParameters.End(4, 56), + Tokens.Identifiers.EventName("Event", 4, 58), + Tokens.Puncuation.Semicolon(4, 63), + + Tokens.Puncuation.CurlyBrace.Close(5, 1)]); + }); + + it("declaration with accessors", () => { + + const input = ` +public class Tester +{ + public event Type Event + { + add { } + remove { } + } +}`; + + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Modifiers.Public(2, 1), + Tokens.Keywords.Class(2, 8), + Tokens.Identifiers.ClassName("Tester", 2, 14), + Tokens.Puncuation.CurlyBrace.Open(3, 1), + + Tokens.Keywords.Modifiers.Public(4, 5), + Tokens.Keywords.Event(4, 12), + Tokens.Type("Type", 4, 18), + Tokens.Identifiers.EventName("Event", 4, 23), + Tokens.Puncuation.CurlyBrace.Open(5, 5), + Tokens.Keywords.Add(6, 9), + Tokens.Puncuation.CurlyBrace.Open(6, 13), + Tokens.Puncuation.CurlyBrace.Close(6, 15), + Tokens.Keywords.Remove(7, 9), + Tokens.Puncuation.CurlyBrace.Open(7, 16), + Tokens.Puncuation.CurlyBrace.Close(7, 18), + Tokens.Puncuation.CurlyBrace.Close(8, 5), + + Tokens.Puncuation.CurlyBrace.Close(9, 1)]); + }); + }); +}); + + diff --git a/test/syntaxes/fields.test.syntax.ts b/test/syntaxes/fields.test.syntax.ts index 34f0ca6b93..cf4516e06f 100644 --- a/test/syntaxes/fields.test.syntax.ts +++ b/test/syntaxes/fields.test.syntax.ts @@ -188,7 +188,7 @@ public class Tester Tokens.Puncuation.CurlyBrace.Close(6, 1)]); }); - 
it("multiple field declarators", () => { + it("declaration with multiple declarators", () => { const input = ` public class Tester diff --git a/test/syntaxes/properties.test.syntax.ts b/test/syntaxes/properties.test.syntax.ts index bb8cc3fb64..9d844e9b17 100644 --- a/test/syntaxes/properties.test.syntax.ts +++ b/test/syntaxes/properties.test.syntax.ts @@ -78,7 +78,6 @@ class Tester Tokens.Puncuation.CurlyBrace.Close(5, 1)]); }); - it("declaration without modifiers", () => { const input = ` @@ -132,6 +131,34 @@ class Tester Tokens.Puncuation.CurlyBrace.Close(5, 1)]); }); + it("auto-property single line (protected internal)", function () { + + const input = ` +class Tester +{ + protected internal IBooom Property { get; set; } +}`; + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Class(2, 1), + Tokens.Identifiers.ClassName("Tester", 2, 7), + Tokens.Puncuation.CurlyBrace.Open(3, 1), + + Tokens.Keywords.Modifiers.Protected(4, 5), + Tokens.Keywords.Modifiers.Internal(4, 15), + Tokens.Type("IBooom", 4, 24), + Tokens.Identifiers.PropertyName("Property", 4, 31), + Tokens.Puncuation.CurlyBrace.Open(4, 40), + Tokens.Keywords.Get(4, 42), + Tokens.Puncuation.Semicolon(4, 45), + Tokens.Keywords.Set(4, 47), + Tokens.Puncuation.Semicolon(4, 50), + Tokens.Puncuation.CurlyBrace.Close(4, 52), + + Tokens.Puncuation.CurlyBrace.Close(5, 1)]); + }); + it("auto-property", () => { const input = ` diff --git a/test/syntaxes/utils/tokenizer.ts b/test/syntaxes/utils/tokenizer.ts index b94a65369f..caa228215b 100644 --- a/test/syntaxes/utils/tokenizer.ts +++ b/test/syntaxes/utils/tokenizer.ts @@ -102,6 +102,9 @@ export namespace Tokens { export const EnumName = (text: string, line?: number, column?: number) => createToken(text, 'entity.name.type.enum.cs', line, column); + export const EventName = (text: string, line?: number, column?: number) => + createToken(text, 'entity.name.variable.event.cs', line, column); + export const FieldName = (text: 
string, line?: number, column?: number) => createToken(text, 'entity.name.variable.field.cs', line, column); @@ -163,6 +166,9 @@ export namespace Tokens { createToken('static', 'storage.modifier.cs', line, column); } + export const Add = (line?: number, column?: number) => + createToken('add', 'keyword.other.add.cs', line, column); + export const Alias = (line?: number, column?: number) => createToken('alias', 'keyword.other.alias.cs', line, column); @@ -178,6 +184,9 @@ export namespace Tokens { export const Enum = (line?: number, column?: number) => createToken('enum', 'keyword.other.enum.cs', line, column); + export const Event = (line?: number, column?: number) => + createToken('event', 'keyword.other.event.cs', line, column); + export const Extern = (line?: number, column?: number) => createToken('extern', 'keyword.other.extern.cs', line, column); @@ -193,6 +202,9 @@ export namespace Tokens { export const New = (line?: number, column?: number) => createToken('new', 'keyword.other.new.cs', line, column); + export const Remove = (line?: number, column?: number) => + createToken('remove', 'keyword.other.remove.cs', line, column); + export const Set = (line?: number, column?: number) => createToken('set', 'keyword.other.set.cs', line, column); @@ -309,9 +321,6 @@ export namespace Tokens { createToken(text, 'variable.parameter.cs', line, column); } - export const StorageModifierKeyword = (text: string, line?: number, column?: number) => - createToken(text, 'storage.modifier.cs', line, column); - export const Type = (text: string, line?: number, column?: number) => createToken(text, 'storage.type.cs', line, column); @@ -321,9 +330,6 @@ export namespace Tokens { export const StringDoubleQuotedVerbatim = (text: string, line?: number, column?: number) => createToken(text, 'string.quoted.double.literal.cs', line, column); - export const EventIdentifier = (text: string, line?: number, column?: number) => - createToken(text, 'entity.name.variable.cs', line, column); - 
export const StringInterpolatedExpression = (text: string, line?: number, column?: number) => createToken(text, 'meta.interpolated.expression.cs', line, column); From 28008bc321d0760fd36123dadef295ab36a53664 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Tue, 27 Dec 2016 12:53:59 -0800 Subject: [PATCH 025/192] Refine field declarations --- syntaxes/csharp2.json | 28 +++++++++++++++++++--------- syntaxes/syntax.md | 16 ++++++++++++++-- test/syntaxes/events.test.syntax.ts | 26 ++++++++++++++++++++++++++ 3 files changed, 59 insertions(+), 11 deletions(-) diff --git a/syntaxes/csharp2.json b/syntaxes/csharp2.json index ddc34b686c..51c83676ba 100644 --- a/syntaxes/csharp2.json +++ b/syntaxes/csharp2.json @@ -718,15 +718,15 @@ ] }, "field-declaration": { - "begin": "(?!.*\\b(?:event)\\b)(?=(?:\\b(?:new|public|protected|internal|private|static|readonly|volatile|const)\\b)*\\s*(?:[_$[:alnum:]\\.\\*\\[\\]<>,\\s]+?)\\s+(?:[_$[:alpha:]][_$[:alnum:]]*)\\s*(?!=>)(?:;|=))", + "begin": "(?=\\b(?(?:(?:new|public|protected|internal|private|static|readonly|volatile|const)\\s+)*)\\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)\\s*(?!=>)(?:;|=))", "end": "(?=;)", "patterns": [ { "include": "#comment" }, { - "match": "\\b((?:(?:new|public|protected|internal|private|static|readonly|volatile|const)\\s+)*)\\s*([_$[:alnum:]\\.\\*\\[\\]<>,\\s]+?)\\s*([_$[:alpha:]][_$[:alnum:]]*)\\s*(?=;|=)", - "captures": { + "begin": "\\b(?(?:(?:new|public|protected|internal|private|static|readonly|volatile|const)\\s+)*)\\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)\\s*(?!=>)(?=;|=)", + "beginCaptures": { "1": { "patterns": [ { @@ -743,16 +743,26 @@ "patterns": [ { "include": "#type" - }, - { - "include": 
"#punctuation-comma" } ] }, "3": { "name": "entity.name.variable.field.cs" } - } + }, + "end": "(?=;)", + "patterns": [ + { + "match": "[_$[:alpha:]][_$[:alnum:]]*", + "name": "entity.name.variable.field.cs" + }, + { + "include": "#punctuation-comma" + }, + { + "include": "#variable-initializer" + } + ] }, { "include": "#variable-initializer" @@ -903,14 +913,14 @@ ] }, "event-declaration": { - "begin": "(?=\\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\b)*)\\s*\\b(?event)\\b\\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*(?:\\s*,\\s*[_$[:alpha:]][_$[:alnum:]]*)*)\\s*(?:\\{|;|$))", + "begin": "(?=\\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\s+)*)\\s*\\b(?event)\\b\\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*(?:\\s*,\\s*[_$[:alpha:]][_$[:alnum:]]*)*)\\s*(?:\\{|;|$))", "end": "(?=\\}|;)", "patterns": [ { "include": "#comment" }, { - "match": "\\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\b)*)\\s*\\b(?event)\\b\\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*(?:\\s*,\\s*[_$[:alpha:]][_$[:alnum:]]*)*)\\s*(?=\\{|;|$)", + "match": 
"\\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\s+)*)\\s*\\b(?event)\\b\\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*(?:\\s*,\\s*[_$[:alpha:]][_$[:alnum:]]*)*)\\s*(?=\\{|;|$)", "captures": { "1": { "patterns": [ diff --git a/syntaxes/syntax.md b/syntaxes/syntax.md index 478c79ae2c..3b40bfc0f1 100644 --- a/syntaxes/syntax.md +++ b/syntaxes/syntax.md @@ -30,11 +30,23 @@ Important regular expressions: * Expression: `(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?)` * Matches: `System.Collections.Generic.Dictionary, System.List>>` +#### Field declaratiosn + +The strategy for field declarations is to match up to the end of the field name. Note that this is the first field name in the case of multiple declarators. +Further field names are matched by looking for identifiers, #punctuation-comma, and #variable-initializer. 
+ +* Expression: `(?=\b(?(?:(?:new|public|protected|internal|private|static|readonly|volatile|const)\s+)*)\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?)\s+(?[_$[:alpha:]][_$[:alnum:]]*)\s*(?!=>)(?:;|=))` +* Break down: + * Storage modifiers: `\b(?(?:(?:new|public|protected|internal|private|static|readonly|volatile|const)\s+)*)` + * Type name: `\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?)` + * First field name: `\s+(?[_$[:alpha:]][_$[:alnum:]]*)*)` + * End: `\s*(?!=>)(?:;|=)` + #### Event declarations -* Expression: `\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\b)*)\s*\b(?event)\b\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?)\s+(?[_$[:alpha:]][_$[:alnum:]]*(?:\s*,\s*[_$[:alpha:]][_$[:alnum:]]*)*)\s*(?=\{|;|$)` +* Expression: `(?=\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\s+)*)\s*\b(?event)\b\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?)\s+(?[_$[:alpha:]][_$[:alnum:]]*(?:\s*,\s*[_$[:alpha:]][_$[:alnum:]]*)*)\s*(?:\{|;|$))` * Break down: - * Storage modifiers: `\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\b)*)` + * Storage modifiers: `\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\s+)*)` * Event keyword: `\s*\b(?event)\b` * Type name: `\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?)` * Event name(s): `\s+(?[_$[:alpha:]][_$[:alnum:]]*(?:\s*,\s*[_$[:alpha:]][_$[:alnum:]]*)*)` diff --git 
a/test/syntaxes/events.test.syntax.ts b/test/syntaxes/events.test.syntax.ts index 36f415d5dc..c06a1c9886 100644 --- a/test/syntaxes/events.test.syntax.ts +++ b/test/syntaxes/events.test.syntax.ts @@ -36,6 +36,32 @@ public class Tester Tokens.Puncuation.CurlyBrace.Close(5, 1)]); }); + it("declaration with multiple modifiers", () => { + + const input = ` +public class Tester +{ + protected internal event Type Event; +}`; + + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Modifiers.Public(2, 1), + Tokens.Keywords.Class(2, 8), + Tokens.Identifiers.ClassName("Tester", 2, 14), + Tokens.Puncuation.CurlyBrace.Open(3, 1), + + Tokens.Keywords.Modifiers.Protected(4, 5), + Tokens.Keywords.Modifiers.Internal(4, 15), + Tokens.Keywords.Event(4, 24), + Tokens.Type("Type", 4, 30), + Tokens.Identifiers.EventName("Event", 4, 35), + Tokens.Puncuation.Semicolon(4, 40), + + Tokens.Puncuation.CurlyBrace.Close(5, 1)]); + }); + it("declaration with multiple declarators", () => { const input = ` From 47e7a222c737964ecceb418e679fc4d57a9437ed Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Tue, 27 Dec 2016 13:11:22 -0800 Subject: [PATCH 026/192] Refine property declarations --- syntaxes/csharp2.json | 4 ++-- syntaxes/syntax.md | 22 ++++++++++++++++++++-- 2 files changed, 22 insertions(+), 4 deletions(-) diff --git a/syntaxes/csharp2.json b/syntaxes/csharp2.json index 51c83676ba..69abfebb8f 100644 --- a/syntaxes/csharp2.json +++ b/syntaxes/csharp2.json @@ -770,14 +770,14 @@ ] }, "property-declaration": { - "begin": "(?!.*\\b(?:class|interface|struct|enum|event)\\b)(?=(?:\\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\b)*\\s*(?:[_$[:alnum:]\\.\\*\\[\\]<>,\\s]+?)\\s+(?:[_$[:alpha:]][_$[:alnum:]]*)\\s*(?:\\{|=>|$))", + "begin": 
"(?!.*\\b(?:class|interface|struct|enum|event)\\b)(?=\\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\s+)*)\\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)\\s*(?:\\{|=>|$))", "end": "(?=\\}|;)", "patterns": [ { "include": "#comment" }, { - "match": "\\b((?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\s+)*)\\s*([_$[:alnum:]\\.\\*\\[\\]<>,\\s]+?)\\s*([_$[:alpha:]][_$[:alnum:]]*)\\s*(?=(\\{|=>|$))", + "match": "\\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\s+)*)\\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)\\s*(?=\\{|=>|$)", "captures": { "1": { "patterns": [ diff --git a/syntaxes/syntax.md b/syntaxes/syntax.md index 3b40bfc0f1..f58b4f9311 100644 --- a/syntaxes/syntax.md +++ b/syntaxes/syntax.md @@ -10,6 +10,11 @@ Important regular expressions: * Expression: `([_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)` * Matches: `System.Collections.Generic.Dictionary` +#### Simple generic name + +* Expression: `(?[_$[:alpha:]][_$[:alnum:]]*)(?:\s*<\s*\g(?:\s*,\s*\g)*\s*>\s*)?` +* Matches: `C` + #### Generic name * Expression: `(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?)` @@ -32,7 +37,7 @@ Important regular expressions: #### Field declaratiosn -The strategy for field declarations is to match up to the end of the field name. Note that this is the first field name in the case of multiple declarators. +Note that fields can have multiple declarators with initializers. Our strategy is to match up to the end of the field name. 
Further field names are matched by looking for identifiers, #punctuation-comma, and #variable-initializer. * Expression: `(?=\b(?(?:(?:new|public|protected|internal|private|static|readonly|volatile|const)\s+)*)\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?)\s+(?[_$[:alpha:]][_$[:alnum:]]*)\s*(?!=>)(?:;|=))` @@ -50,4 +55,17 @@ Further field names are matched by looking for identifiers, #punctuation-comma, * Event keyword: `\s*\b(?event)\b` * Type name: `\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?)` * Event name(s): `\s+(?[_$[:alpha:]][_$[:alnum:]]*(?:\s*,\s*[_$[:alpha:]][_$[:alnum:]]*)*)` - * End: `\s*(?=\{|;|$)` \ No newline at end of file + * End: `\s*(?=\{|;|$)` + +#### Property declarations + +Note that properties can easily match other declarations unintentially. For example, "public class C {" looks a lot like the start of a property +if you consider that regular expressions don't know that "class" is a keyword. To handle this situation, we must use look ahead. + +* Expression: `(?!.*\b(?:class|interface|struct|enum|event)\b)(?=\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\s+)*)\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?)\s+(?[_$[:alpha:]][_$[:alnum:]]*)\s*(?:\{|=>|$))` +* Break down: + * Don't match other declarations! 
`(?!.*\b(?:class|interface|struct|enum|event)\b)` + * Storage modifiers: `\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\s+)*)` + * Type name: `\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?)` + * Property name: `\s+(?[_$[:alpha:]][_$[:alnum:]]*)` + * End: `\s*(?:\{|=>|$))` \ No newline at end of file From e6c0532a166a402f384167e2ad1a9e633fd961ed Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Tue, 27 Dec 2016 13:58:04 -0800 Subject: [PATCH 027/192] Add indexer declarations --- syntaxes/csharp2.json | 113 +++++++++++++++++++++++++- syntaxes/syntax.md | 11 ++- test/syntaxes/indexers.test.syntax.ts | 48 +++++++++++ test/syntaxes/utils/tokenizer.ts | 3 + 4 files changed, 173 insertions(+), 2 deletions(-) create mode 100644 test/syntaxes/indexers.test.syntax.ts diff --git a/syntaxes/csharp2.json b/syntaxes/csharp2.json index 69abfebb8f..cf09352523 100644 --- a/syntaxes/csharp2.json +++ b/syntaxes/csharp2.json @@ -78,6 +78,9 @@ { "include": "#variable-initializer" }, + { + "include": "#indexer-declaration" + }, { "include": "#field-declaration" }, @@ -98,11 +101,14 @@ "include": "#property-declaration" }, { - "include": "#variable-initializer" + "include": "#indexer-declaration" }, { "include": "#field-declaration" }, + { + "include": "#variable-initializer" + }, { "include": "#punctuation-semicolon" } @@ -912,6 +918,60 @@ } ] }, + "indexer-declaration": { + "begin": "(?=\\b(?(?:(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\\s+)*)\\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?)\\s+(?this)\\s*(?:\\[))", + "end": "(?=\\}|;)", + "patterns": [ + { + "include": "#comment" + }, + { + "match": 
"\\b(?(?:(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\\s+)*)\\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?)\\s+(?this)\\s*(?=\\[)", + "captures": { + "1": { + "patterns": [ + { + "match": "\\b(new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\\b", + "captures": { + "1": { + "name": "storage.modifier.cs" + } + } + } + ] + }, + "2": { + "patterns": [ + { + "include": "#type" + } + ] + }, + "3": { + "name": "keyword.other.this.cs" + } + } + }, + { + "begin": "(?=\\[)", + "end": "(?=\\])", + "patterns": [ + { + "include": "#bracketed-parameter-list" + } + ] + }, + { + "include": "#property-accessors" + }, + { + "include": "#expression-body" + }, + { + "include": "#variable-initializer" + } + ] + }, "event-declaration": { "begin": "(?=\\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\s+)*)\\s*\\b(?event)\\b\\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*(?:\\s*,\\s*[_$[:alpha:]][_$[:alnum:]]*)*)\\s*(?:\\{|;|$))", "end": "(?=\\}|;)", @@ -1154,6 +1214,57 @@ } ] }, + "bracketed-parameter-list": { + "begin": "(?=(\\[))", + "beginCaptures": { + "1": { + "name": "punctuation.squarebracket.open.cs" + } + }, + "end": "(?=(\\]))", + "endCaptures": { + "1": { + "name": "punctuation.squarebracket.close.cs" + } + }, + "patterns": [ + { + "comment": "Note: We have to be careful here to skip the [. 
Otherwise, attributes will conflict.", + "begin": "(?<=\\[)", + "end": "(?=\\])", + "patterns": [ + { + "include": "#comment" + }, + { + "include": "#attribute-section" + }, + { + "match": "\\b(ref|params|out)\\b", + "name": "storage.modifier.cs" + }, + { + "comment": "parameter name", + "match": "\\s+([_$[:alpha:]][_$[:alnum:]]*)\\s*(?=[,\\]])", + "captures": { + "1": { + "name": "variable.parameter.cs" + } + } + }, + { + "include": "#variable-initializer" + }, + { + "include": "#type" + }, + { + "include": "#punctuation-comma" + } + ] + } + ] + }, "parenthesized-parameter-list": { "begin": "(?=(\\())", "beginCaptures": { diff --git a/syntaxes/syntax.md b/syntaxes/syntax.md index f58b4f9311..f8a39c2764 100644 --- a/syntaxes/syntax.md +++ b/syntaxes/syntax.md @@ -68,4 +68,13 @@ if you consider that regular expressions don't know that "class" is a keyword. T * Storage modifiers: `\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\s+)*)` * Type name: `\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?)` * Property name: `\s+(?[_$[:alpha:]][_$[:alnum:]]*)` - * End: `\s*(?:\{|=>|$))` \ No newline at end of file + * End: `\s*(?:\{|=>|$))` + +#### Indexer declarations + +* Expression: `(?=\b(?(?:(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\s+)*)\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?)\s+(?this)\s*(?:\[))` +* Break down: + * Storage modifiers: `\b(?(?:(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\s+)*)` + * Type name: `\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?)` + * Property name: `\s+(?this)` + * End: `\s*(?:\[))` \ No newline at end of file diff --git 
a/test/syntaxes/indexers.test.syntax.ts b/test/syntaxes/indexers.test.syntax.ts new file mode 100644 index 0000000000..a299814ccd --- /dev/null +++ b/test/syntaxes/indexers.test.syntax.ts @@ -0,0 +1,48 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. + *--------------------------------------------------------------------------------------------*/ + +import { should } from 'chai'; +import { Tokens } from './utils/tokenizer'; +import { TokenizerUtil } from './utils/tokenizerUtil'; + +describe("Grammar", () => { + before(() => should()); + + describe("Indexers", () => { + it("declaration", () => { + + const input = ` +class Tester +{ + public string this[int index] + { + get { return index.ToString(); } + } +}`; + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Class(2, 1), + Tokens.Identifiers.ClassName("Tester", 2, 7), + Tokens.Puncuation.CurlyBrace.Open(3, 1), + + Tokens.Keywords.Modifiers.Public(4, 5), + Tokens.Type("string", 4, 12), + Tokens.Keywords.This(4, 19), + Tokens.Puncuation.SquareBracket.Open(4, 23), + Tokens.Type("int", 4, 24), + Tokens.Variables.Parameter("index", 4, 28), + Tokens.Puncuation.SquareBracket.Close(4, 33), + Tokens.Puncuation.CurlyBrace.Open(5, 5), + Tokens.Keywords.Get(6, 9), + Tokens.Puncuation.CurlyBrace.Open(6, 13), + Tokens.Puncuation.Semicolon(6, 38), + Tokens.Puncuation.CurlyBrace.Close(6, 40), + Tokens.Puncuation.CurlyBrace.Close(7, 5), + + Tokens.Puncuation.CurlyBrace.Close(8, 1)]); + }); + }); +}); diff --git a/test/syntaxes/utils/tokenizer.ts b/test/syntaxes/utils/tokenizer.ts index caa228215b..c718b978be 100644 --- a/test/syntaxes/utils/tokenizer.ts +++ b/test/syntaxes/utils/tokenizer.ts @@ -214,6 +214,9 @@ export namespace Tokens { export const Struct = (line?: number, 
column?: number) => createToken('struct', 'keyword.other.struct.cs', line, column); + export const This = (line?: number, column?: number) => + createToken('this', 'keyword.other.this.cs', line, column); + export const Using = (line?: number, column?: number) => createToken('using', 'keyword.other.using.cs', line, column); From e406840bff37da6e9f74d1bb9a255634d0b47688 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Wed, 28 Dec 2016 03:17:43 -0800 Subject: [PATCH 028/192] Add interpolated strings --- syntaxes/csharp2.json | 132 ++++++++--- .../interpolated-strings.test.syntax.ts | 208 ++++++++++++++++++ test/syntaxes/string.test.syntax.ts | 128 ----------- test/syntaxes/utils/tokenizer.ts | 42 ++-- 4 files changed, 338 insertions(+), 172 deletions(-) create mode 100644 test/syntaxes/interpolated-strings.test.syntax.ts delete mode 100644 test/syntaxes/string.test.syntax.ts diff --git a/syntaxes/csharp2.json b/syntaxes/csharp2.json index cf09352523..a1ddfe0525 100644 --- a/syntaxes/csharp2.json +++ b/syntaxes/csharp2.json @@ -119,11 +119,20 @@ }, "expressions": { "patterns": [ + { + "include": "#object-creation-expression" + }, + { + "include": "#interpolated-string" + }, + { + "include": "#verbatim-interpolated-string" + }, { "include": "#literal" }, { - "include": "#object-creation-expression" + "include": "#identifier" } ] }, @@ -1127,6 +1136,69 @@ } ] }, + "interpolated-string": { + "name": "string.quoted.double.cs", + "begin": "\\$\"", + "beginCaptures": { + "0": { + "name": "punctuation.definition.string.begin.cs" + } + }, + "end": "(\")|((?:[^\\\\\\n])$)", + "endCaptures": { + "1": { + "name": "punctuation.definition.string.end.cs" + }, + "2": { + "name": "invalid.illegal.newline.cs" + } + }, + "patterns": [ + { + "include": "#interpolation" + } + ] + }, + "verbatim-interpolated-string": { + "name": "string.quoted.double.cs", + "begin": "\\$@\"", + "beginCaptures": { + "0": { + "name": "punctuation.definition.string.begin.cs" + } + }, + "end": "\"", + 
"endCaptures": { + "0": { + "name": "punctuation.definition.string.end.cs" + } + }, + "patterns": [ + { + "include": "#interpolation" + } + ] + }, + "interpolation": { + "name": "meta.interpolation.cs", + "begin": "(?<=[^\\{])(?:\\{\\{)*(\\{)(?=[^\\{])", + "beginCaptures": { + "1": { + "name": "punctuation.definition.interpolation.begin.cs" + } + }, + "end": "\\}", + "endCaptures": { + "0": { + "name": "punctuation.definition.interpolation.end.cs" + } + }, + "patterns": [ + { + "include": "#expressions" + } + ] + }, "literal": { "patterns": [ { @@ -1193,6 +1265,37 @@ } ] }, + "string-literal": { + "name": "string.quoted.double.cs", + "begin": "\"", + "beginCaptures": { + "0": { + "name": "punctuation.definition.string.begin.cs" + } + }, + "end": "(\")|((?:[^\\\\\\n])$)", + "endCaptures": { + "1": { + "name": "punctuation.definition.string.end.cs" + }, + "2": { + "name": "invalid.illegal.newline.cs" + } + }, + "patterns": [ + { + "include": "#string-character-escape" + } + ] + }, + "string-character-escape": { + "name": "constant.character.escape.cs", + "match": "\\\\." + }, + "identifier": { + "name": "variable.other.readwrite.cs", + "match": "[_$[:alpha:]][_$[:alnum:]]*" + }, "object-creation-expression": { "begin": "(new)\\s+([_$[:alnum:]\\.\\*\\[\\]<>,\\s]+?)\\s*(?=\\()", "beginCaptures": { @@ -1309,33 +1412,6 @@ } ] }, - "string-literal": { - "name": "string.quoted.double.cs", - "begin": "\"", - "beginCaptures": { - "0": { - "name": "punctuation.definition.string.begin.cs" - } - }, - "end": "(\")|((?:[^\\\\\\n])$)", - "endCaptures": { - "1": { - "name": "punctuation.definition.string.end.cs" - }, - "2": { - "name": "invalid.illegal.newline.cs" - } - }, - "patterns": [ - { - "include": "#string-character-escape" - } - ] - }, - "string-character-escape": { - "name": "constant.character.escape.cs", - "match": "\\\\." 
- }, "type": { "name": "meta.type.cs", "patterns": [ diff --git a/test/syntaxes/interpolated-strings.test.syntax.ts b/test/syntaxes/interpolated-strings.test.syntax.ts new file mode 100644 index 0000000000..096554025d --- /dev/null +++ b/test/syntaxes/interpolated-strings.test.syntax.ts @@ -0,0 +1,208 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. + *--------------------------------------------------------------------------------------------*/ + +import { should } from 'chai'; +import { Tokens } from './utils/tokenizer'; +import { TokenizerUtil } from './utils/tokenizerUtil'; + +describe("Grammar", () => { + before(() => should()); + + describe("Interpolated strings", () => { + it("two interpolations", () => { + + const input = ` +public class Tester +{ + string test = $"hello {one} world {two}!"; +}`; + + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Modifiers.Public(2, 1), + Tokens.Keywords.Class(2, 8), + Tokens.Identifiers.ClassName("Tester", 2, 14), + Tokens.Puncuation.CurlyBrace.Open(3, 1), + + Tokens.Type("string", 4, 5), + Tokens.Identifiers.FieldName("test", 4, 12), + Tokens.Operators.Assignment(4, 17), + Tokens.Puncuation.InterpolatedString.Begin(4, 19), + Tokens.Literals.String("hello ", 4, 21), + Tokens.Puncuation.Interpolation.Begin(4, 27), + Tokens.Variables.ReadWrite("one", 4, 28), + Tokens.Puncuation.Interpolation.End(4, 31), + Tokens.Literals.String(" world ", 4, 32), + Tokens.Puncuation.Interpolation.Begin(4, 39), + Tokens.Variables.ReadWrite("two", 4, 40), + Tokens.Puncuation.Interpolation.End(4, 43), + Tokens.Literals.String("!", 4, 44), + Tokens.Puncuation.InterpolatedString.End(4, 45), + Tokens.Puncuation.Semicolon(4, 46), + + Tokens.Puncuation.CurlyBrace.Close(5, 1)]); + }); + + it("no 
interpolations", () => { + + const input = ` +public class Tester +{ + string test = $"hello world!"; +}`; + + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Modifiers.Public(2, 1), + Tokens.Keywords.Class(2, 8), + Tokens.Identifiers.ClassName("Tester", 2, 14), + Tokens.Puncuation.CurlyBrace.Open(3, 1), + + Tokens.Type("string", 4, 5), + Tokens.Identifiers.FieldName("test", 4, 12), + Tokens.Operators.Assignment(4, 17), + Tokens.Puncuation.InterpolatedString.Begin(4, 19), + Tokens.Literals.String("hello world!", 4, 21), + Tokens.Puncuation.InterpolatedString.End(4, 33), + Tokens.Puncuation.Semicolon(4, 34), + + Tokens.Puncuation.CurlyBrace.Close(5, 1)]); + }); + + it("break across two lines (non-verbatim)", () => { + + const input = ` +public class Tester +{ + string test = $"hello +world!"; +}`; + + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Modifiers.Public(2, 1), + Tokens.Keywords.Class(2, 8), + Tokens.Identifiers.ClassName("Tester", 2, 14), + Tokens.Puncuation.CurlyBrace.Open(3, 1), + + Tokens.Type("string", 4, 5), + Tokens.Identifiers.FieldName("test", 4, 12), + Tokens.Operators.Assignment(4, 17), + Tokens.Puncuation.InterpolatedString.Begin(4, 19), + Tokens.Literals.String("hell", 4, 21), + + // Note: Because the string ended prematurely, the rest of this line and the contents of the next are junk. 
+ Tokens.IllegalNewLine("o", 4, 25), + Tokens.Variables.ReadWrite("world", 5, 1), + Tokens.Puncuation.String.Begin(5, 7), + Tokens.IllegalNewLine(";", 5, 8)]); + }); + + it("verbatim with two interpolations", () => { + + const input = ` +public class Tester +{ + string test = $@"hello {one} world {two}!"; +}`; + + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Modifiers.Public(2, 1), + Tokens.Keywords.Class(2, 8), + Tokens.Identifiers.ClassName("Tester", 2, 14), + Tokens.Puncuation.CurlyBrace.Open(3, 1), + + Tokens.Type("string", 4, 5), + Tokens.Identifiers.FieldName("test", 4, 12), + Tokens.Operators.Assignment(4, 17), + Tokens.Puncuation.InterpolatedString.VerbatimBegin(4, 19), + Tokens.Literals.String("hello ", 4, 22), + Tokens.Puncuation.Interpolation.Begin(4, 28), + Tokens.Variables.ReadWrite("one", 4, 29), + Tokens.Puncuation.Interpolation.End(4, 32), + Tokens.Literals.String(" world ", 4, 33), + Tokens.Puncuation.Interpolation.Begin(4, 40), + Tokens.Variables.ReadWrite("two", 4, 41), + Tokens.Puncuation.Interpolation.End(4, 44), + Tokens.Literals.String("!", 4, 45), + Tokens.Puncuation.InterpolatedString.End(4, 46), + Tokens.Puncuation.Semicolon(4, 47), + + Tokens.Puncuation.CurlyBrace.Close(5, 1)]); + }); + + it("break across two lines with two interpolations (verbatim)", () => { + + const input = ` +public class Tester +{ + string test = $@"hello {one} + world {two}!"; +}`; + + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Modifiers.Public(2, 1), + Tokens.Keywords.Class(2, 8), + Tokens.Identifiers.ClassName("Tester", 2, 14), + Tokens.Puncuation.CurlyBrace.Open(3, 1), + + Tokens.Type("string", 4, 5), + Tokens.Identifiers.FieldName("test", 4, 12), + Tokens.Operators.Assignment(4, 17), + Tokens.Puncuation.InterpolatedString.VerbatimBegin(4, 19), + Tokens.Literals.String("hello ", 4, 22), + Tokens.Puncuation.Interpolation.Begin(4, 28), + 
Tokens.Variables.ReadWrite("one", 4, 29), + Tokens.Puncuation.Interpolation.End(4, 32), + Tokens.Literals.String(" world ", 5, 1), + Tokens.Puncuation.Interpolation.Begin(5, 11), + Tokens.Variables.ReadWrite("two", 5, 12), + Tokens.Puncuation.Interpolation.End(5, 15), + Tokens.Literals.String("!", 5, 16), + Tokens.Puncuation.InterpolatedString.End(5, 17), + Tokens.Puncuation.Semicolon(5, 18), + + Tokens.Puncuation.CurlyBrace.Close(6, 1)]); + }); + + it("break across two lines with no interpolations (verbatim)", () => { + + const input = ` +public class Tester +{ + string test = $@"hello + world!"; +}`; + + let tokens = TokenizerUtil.tokenize2(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Modifiers.Public(2, 1), + Tokens.Keywords.Class(2, 8), + Tokens.Identifiers.ClassName("Tester", 2, 14), + Tokens.Puncuation.CurlyBrace.Open(3, 1), + + Tokens.Type("string", 4, 5), + Tokens.Identifiers.FieldName("test", 4, 12), + Tokens.Operators.Assignment(4, 17), + Tokens.Puncuation.InterpolatedString.VerbatimBegin(4, 19), + Tokens.Literals.String("hello", 4, 22), + Tokens.Literals.String(" world!", 5, 1), + Tokens.Puncuation.InterpolatedString.End(5, 11), + Tokens.Puncuation.Semicolon(5, 12), + + Tokens.Puncuation.CurlyBrace.Close(6, 1)]); + }); + }); +}); + + diff --git a/test/syntaxes/string.test.syntax.ts b/test/syntaxes/string.test.syntax.ts deleted file mode 100644 index b9e32abded..0000000000 --- a/test/syntaxes/string.test.syntax.ts +++ /dev/null @@ -1,128 +0,0 @@ -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. 
- *--------------------------------------------------------------------------------------------*/ - -import { should } from 'chai'; -import { Tokens } from './utils/tokenizer'; -import { TokenizerUtil } from'./utils/tokenizerUtil'; - -describe("Grammar", function() { - before(function() { - should(); - }); - - describe("String interpolated", function() { - it("non-verbatim", function() { - -const input = ` -public class Tester -{ - string test = $"hello {one} world {two}!"; -}`; - - let tokens = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.StringStart('$"', 4, 19)); - tokens.should.contain(Tokens.StringDoubleQuoted("hello ", 4, 21)); - tokens.should.contain(Tokens.StringInterpolatedExpression("one", 4, 28)); - tokens.should.contain(Tokens.StringDoubleQuoted(" world ", 4, 32)); - tokens.should.contain(Tokens.StringInterpolatedExpression("two", 4, 40)); - tokens.should.contain(Tokens.StringDoubleQuoted("!", 4, 44)); - tokens.should.contain(Tokens.StringEnd('"', 4, 45)); - }); - - - it("non-verbatim without expressions single-line", function() { - -const input = ` -public class Tester -{ - string test = $"hello world!"; -}`; - - let tokens = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.StringStart('$"', 4, 19)); - tokens.should.contain(Tokens.StringDoubleQuoted("hello world!", 4, 21)); - tokens.should.contain(Tokens.StringEnd('"', 4, 33)); - }); - - it("non-verbatim multi-line", function() { - -const input = ` -public class Tester -{ - string test = $"hello -world!"; -}`; - - let tokens = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.StringStart('$"', 4, 19)); - tokens.should.contain(Tokens.StringDoubleQuoted("hello", 4, 21)); - tokens.should.not.contain(Tokens.StringDoubleQuoted("world!", 5, 1)); - tokens.should.not.contain(Tokens.StringEnd('"', 5, 7)); - }); - - - it("verbatim single-line", function() { - -const input = ` -public class Tester -{ - string test = $@"hello {one} world {two}!"; -}`; - - let tokens 
= TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.StringStart('$@"', 4, 19)); - tokens.should.contain(Tokens.StringDoubleQuotedVerbatim("hello ", 4, 22)); - tokens.should.contain(Tokens.StringInterpolatedExpression("one", 4, 29)); - tokens.should.contain(Tokens.StringDoubleQuotedVerbatim(" world ", 4, 33)); - tokens.should.contain(Tokens.StringInterpolatedExpression("two", 4, 41)); - tokens.should.contain(Tokens.StringDoubleQuotedVerbatim("!", 4, 45)); - tokens.should.contain(Tokens.StringEnd('"', 4, 46)); - }); - - - it("verbatim multi-line", function() { - -const input = ` -public class Tester -{ - string test = $@"hello {one} - world {two}!"; -}`; - - let tokens = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.StringStart('$@"', 4, 19)); - tokens.should.contain(Tokens.StringDoubleQuotedVerbatim("hello ", 4, 22)); - tokens.should.contain(Tokens.StringInterpolatedExpression("one", 4, 29)); - tokens.should.contain(Tokens.StringDoubleQuotedVerbatim(" world ", 5, 1)); - tokens.should.contain(Tokens.StringInterpolatedExpression("two", 5, 12)); - tokens.should.contain(Tokens.StringDoubleQuotedVerbatim("!", 5, 16)); - tokens.should.contain(Tokens.StringEnd('"', 5, 17)); - }); - - it("verbatim multi-line without expressions", function() { - -const input = ` -public class Tester -{ - string test = $@"hello - world!"; -}`; - - let tokens = TokenizerUtil.tokenize(input); - - tokens.should.contain(Tokens.StringStart('$@"', 4, 19)); - tokens.should.contain(Tokens.StringDoubleQuotedVerbatim("hello", 4, 22)); - tokens.should.contain(Tokens.StringDoubleQuotedVerbatim(" world!", 5, 1)); - tokens.should.contain(Tokens.StringEnd('"', 5, 11)); - }); - }); -}); - - diff --git a/test/syntaxes/utils/tokenizer.ts b/test/syntaxes/utils/tokenizer.ts index c718b978be..a693cf4d90 100644 --- a/test/syntaxes/utils/tokenizer.ts +++ b/test/syntaxes/utils/tokenizer.ts @@ -9,7 +9,7 @@ export class Tokenizer { private _registry: Registry; private _grammar: 
IGrammar; - private static readonly _excludedTypes: string[] = ['source.cs', 'meta.type.parameters.cs']; + private static readonly _excludedTypes: string[] = ['source.cs', 'meta.interpolation.cs', 'meta.type.parameters.cs']; constructor(grammarFilePath: string) { this._grammar = new Registry().loadGrammarFromPathSync(grammarFilePath); @@ -277,6 +277,25 @@ export namespace Tokens { createToken('{', 'punctuation.curlybrace.open.cs', line, column); } + export namespace Interpolation { + export const Begin = (line?: number, column?: number) => + createToken('{', 'punctuation.definition.interpolation.begin.cs', line, column); + + export const End = (line?: number, column?: number) => + createToken('}', 'punctuation.definition.interpolation.end.cs', line, column); + } + + export namespace InterpolatedString { + export const Begin = (line?: number, column?: number) => + createToken('$"', 'punctuation.definition.string.begin.cs', line, column); + + export const End = (line?: number, column?: number) => + createToken('"', 'punctuation.definition.string.end.cs', line, column); + + export const VerbatimBegin = (line?: number, column?: number) => + createToken('$@"', 'punctuation.definition.string.begin.cs', line, column); + } + export namespace Parenthesis { export const Close = (line?: number, column?: number) => createToken(')', 'punctuation.parenthesis.close.cs', line, column); @@ -322,23 +341,14 @@ export namespace Tokens { export const Parameter = (text: string, line?: number, column?: number) => createToken(text, 'variable.parameter.cs', line, column); + + export const ReadWrite = (text: string, line?: number, column?: number) => + createToken(text, 'variable.other.readwrite.cs', line, column); } + export const IllegalNewLine = (text: string, line?: number, column?: number) => + createToken(text, 'invalid.illegal.newline.cs', line, column); + export const Type = (text: string, line?: number, column?: number) => createToken(text, 'storage.type.cs', line, column); - - 
export const StringDoubleQuoted = (text: string, line?: number, column?: number) => - createToken(text, 'string.quoted.double.cs', line, column); - - export const StringDoubleQuotedVerbatim = (text: string, line?: number, column?: number) => - createToken(text, 'string.quoted.double.literal.cs', line, column); - - export const StringInterpolatedExpression = (text: string, line?: number, column?: number) => - createToken(text, 'meta.interpolated.expression.cs', line, column); - - export const StringStart = (text: string, line?: number, column?: number) => - createToken(text, 'punctuation.definition.string.begin.cs', line, column); - - export const StringEnd = (text: string, line?: number, column?: number) => - createToken(text, 'punctuation.definition.string.end.cs', line, column); } From b5c61eb95f27030f0db8f991ba128839ab96ed82 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Wed, 28 Dec 2016 03:19:50 -0800 Subject: [PATCH 029/192] Refactor tests a bit --- test/syntaxes/attributes.test.syntax.ts | 20 +++++++++---------- test/syntaxes/boolean-literals.test.syntax.ts | 4 ++-- test/syntaxes/classes.test.syntax.ts | 10 +++++----- test/syntaxes/comments.test.syntax.ts | 6 +++--- test/syntaxes/delegates.test.syntax.ts | 8 ++++---- test/syntaxes/enums.test.syntax.ts | 10 +++++----- test/syntaxes/events.test.syntax.ts | 10 +++++----- test/syntaxes/extern-aliases.test.syntax.ts | 2 +- test/syntaxes/fields.test.syntax.ts | 12 +++++------ test/syntaxes/indexers.test.syntax.ts | 2 +- test/syntaxes/interfaces.test.syntax.ts | 10 +++++----- .../interpolated-strings.test.syntax.ts | 12 +++++------ test/syntaxes/namespaces.test.syntax.ts | 8 ++++---- test/syntaxes/numeric-literals.test.syntax.ts | 2 +- test/syntaxes/properties.test.syntax.ts | 18 ++++++++--------- test/syntaxes/structs.test.syntax.ts | 8 ++++---- test/syntaxes/using-directives.test.syntax.ts | 14 ++++++------- test/syntaxes/utils/tokenizerUtil.ts | 7 +------ 18 files changed, 79 insertions(+), 84 deletions(-) 
diff --git a/test/syntaxes/attributes.test.syntax.ts b/test/syntaxes/attributes.test.syntax.ts index cf5df10d5e..b82e414260 100644 --- a/test/syntaxes/attributes.test.syntax.ts +++ b/test/syntaxes/attributes.test.syntax.ts @@ -16,7 +16,7 @@ describe("Grammar", () => { const input = ` [Foo]`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Puncuation.SquareBracket.Open(2, 1), @@ -29,7 +29,7 @@ describe("Grammar", () => { const input = ` [assembly: Foo]`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Puncuation.SquareBracket.Open(2, 1), @@ -44,7 +44,7 @@ describe("Grammar", () => { const input = ` [module: Foo, Bar]`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Puncuation.SquareBracket.Open(2, 1), @@ -61,7 +61,7 @@ describe("Grammar", () => { const input = ` [module: Foo(), Bar()]`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Puncuation.SquareBracket.Open(2, 1), @@ -82,7 +82,7 @@ describe("Grammar", () => { const input = ` [Foo(true)]`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Puncuation.SquareBracket.Open(2, 1), @@ -98,7 +98,7 @@ describe("Grammar", () => { const input = ` [Foo(true, 42)]`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Puncuation.SquareBracket.Open(2, 1), @@ -116,7 +116,7 @@ describe("Grammar", () => { const input = ` [Foo(true, 42, "text")]`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Puncuation.SquareBracket.Open(2, 1), @@ -138,7 +138,7 @@ describe("Grammar", () => { const input = 
` [Foo(Bar = 42)]`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Puncuation.SquareBracket.Open(2, 1), @@ -156,7 +156,7 @@ describe("Grammar", () => { const input = ` [Foo(true, Bar = 42)]`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Puncuation.SquareBracket.Open(2, 1), @@ -176,7 +176,7 @@ describe("Grammar", () => { const input = ` [module: Foo(true, Bar = 42, Baz = "hello")]`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Puncuation.SquareBracket.Open(2, 1), diff --git a/test/syntaxes/boolean-literals.test.syntax.ts b/test/syntaxes/boolean-literals.test.syntax.ts index f8e8474e92..b9c9197d8b 100644 --- a/test/syntaxes/boolean-literals.test.syntax.ts +++ b/test/syntaxes/boolean-literals.test.syntax.ts @@ -20,7 +20,7 @@ class C { } }`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Class(2, 1), @@ -39,7 +39,7 @@ class C { } }`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Class(2, 1), diff --git a/test/syntaxes/classes.test.syntax.ts b/test/syntaxes/classes.test.syntax.ts index da7383bd5b..6271cc60d0 100644 --- a/test/syntaxes/classes.test.syntax.ts +++ b/test/syntaxes/classes.test.syntax.ts @@ -34,7 +34,7 @@ namespace TestNamespace abstract class DefaultAbstractClass { } }`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Namespace(2, 1), @@ -107,7 +107,7 @@ namespace TestNamespace { class Dictionary { } }`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Namespace(2, 1), 
@@ -131,7 +131,7 @@ namespace TestNamespace class PublicClass : Root.IInterface, Something.IInterfaceTwo { } class PublicClass : Dictionary>, IMap> { } }`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Namespace(2, 1), @@ -209,7 +209,7 @@ namespace TestNamespace { } }`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Namespace(2, 1), @@ -272,7 +272,7 @@ namespace TestNamespace } } }`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Namespace(2, 1), diff --git a/test/syntaxes/comments.test.syntax.ts b/test/syntaxes/comments.test.syntax.ts index af87535d1e..96466026ce 100644 --- a/test/syntaxes/comments.test.syntax.ts +++ b/test/syntaxes/comments.test.syntax.ts @@ -16,7 +16,7 @@ describe("Grammar", () => { const input = ` // foo`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Comment.SingleLine.Start(2, 1), @@ -28,7 +28,7 @@ describe("Grammar", () => { const input = ` // foo`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Comment.LeadingWhitespace(" ", 2, 1), @@ -41,7 +41,7 @@ describe("Grammar", () => { const input = ` /* foo */`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Comment.MultiLine.Start(2, 1), diff --git a/test/syntaxes/delegates.test.syntax.ts b/test/syntaxes/delegates.test.syntax.ts index df13ef79da..98882479f5 100644 --- a/test/syntaxes/delegates.test.syntax.ts +++ b/test/syntaxes/delegates.test.syntax.ts @@ -17,7 +17,7 @@ describe("Grammar", () => { delegate void D(); `; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = 
TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Delegate(2, 1), @@ -34,7 +34,7 @@ delegate void D(); delegate TResult D(T arg1); `; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Delegate(2, 1), @@ -54,7 +54,7 @@ delegate void D() where T1 : T2; `; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Delegate(2, 1), @@ -75,7 +75,7 @@ delegate void D() delegate int D(ref string x, out int y, params object[] z); `; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Delegate(2, 1), diff --git a/test/syntaxes/enums.test.syntax.ts b/test/syntaxes/enums.test.syntax.ts index 4e700916f3..b24681d777 100644 --- a/test/syntaxes/enums.test.syntax.ts +++ b/test/syntaxes/enums.test.syntax.ts @@ -17,7 +17,7 @@ describe("Grammar", () => { enum E { } `; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Enum(2, 1), @@ -32,7 +32,7 @@ enum E { } enum E : byte { } `; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Enum(2, 1), @@ -49,7 +49,7 @@ enum E : byte { } enum E { M1 } `; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Enum(2, 1), @@ -65,7 +65,7 @@ enum E { M1 } enum Color { Red, Green, Blue } `; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Enum(2, 1), @@ -90,7 +90,7 @@ enum E } `; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Enum(2, 1), diff --git 
a/test/syntaxes/events.test.syntax.ts b/test/syntaxes/events.test.syntax.ts index c06a1c9886..d22becdb72 100644 --- a/test/syntaxes/events.test.syntax.ts +++ b/test/syntaxes/events.test.syntax.ts @@ -19,7 +19,7 @@ public class Tester public event Type Event; }`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Modifiers.Public(2, 1), @@ -44,7 +44,7 @@ public class Tester protected internal event Type Event; }`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Modifiers.Public(2, 1), @@ -70,7 +70,7 @@ public class Tester public event Type Event1, Event2; }`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Modifiers.Public(2, 1), @@ -97,7 +97,7 @@ public class Tester public event EventHandler, Dictionary> Event; }`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Modifiers.Public(2, 1), @@ -139,7 +139,7 @@ public class Tester } }`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Modifiers.Public(2, 1), diff --git a/test/syntaxes/extern-aliases.test.syntax.ts b/test/syntaxes/extern-aliases.test.syntax.ts index fa08a7229d..c534f6f654 100644 --- a/test/syntaxes/extern-aliases.test.syntax.ts +++ b/test/syntaxes/extern-aliases.test.syntax.ts @@ -17,7 +17,7 @@ describe("Grammar", () => { extern alias X; extern alias Y;`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Extern(2, 1), diff --git a/test/syntaxes/fields.test.syntax.ts b/test/syntaxes/fields.test.syntax.ts index cf4516e06f..2ad57bc316 100644 --- a/test/syntaxes/fields.test.syntax.ts +++ 
b/test/syntaxes/fields.test.syntax.ts @@ -21,7 +21,7 @@ public class Tester private List field123; }`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Modifiers.Public(2, 1), @@ -55,7 +55,7 @@ public class Tester private Dictionary< List, Dictionary> _field; }`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Modifiers.Public(2, 1), @@ -95,7 +95,7 @@ public class Tester string _field3; }`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Modifiers.Public(2, 1), @@ -131,7 +131,7 @@ public class Tester string[] field123; }`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Modifiers.Public(2, 1), @@ -161,7 +161,7 @@ public class Tester const bool field = true; }`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Modifiers.Public(2, 1), @@ -196,7 +196,7 @@ public class Tester int x = 19, y = 23, z = 42; }`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Modifiers.Public(2, 1), diff --git a/test/syntaxes/indexers.test.syntax.ts b/test/syntaxes/indexers.test.syntax.ts index a299814ccd..ff1553dbc9 100644 --- a/test/syntaxes/indexers.test.syntax.ts +++ b/test/syntaxes/indexers.test.syntax.ts @@ -21,7 +21,7 @@ class Tester get { return index.ToString(); } } }`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Class(2, 1), diff --git a/test/syntaxes/interfaces.test.syntax.ts b/test/syntaxes/interfaces.test.syntax.ts index 07bb9ca1ce..5e873baa8c 100644 --- 
a/test/syntaxes/interfaces.test.syntax.ts +++ b/test/syntaxes/interfaces.test.syntax.ts @@ -17,7 +17,7 @@ describe("Grammar", () => { interface IFoo { } `; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Interface(2, 1), @@ -33,7 +33,7 @@ interface IFoo { } interface IBar : IFoo { } `; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Interface(2, 1), @@ -54,7 +54,7 @@ interface IBar : IFoo { } interface IFoo { } `; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Interface(2, 1), @@ -69,7 +69,7 @@ interface IFoo { } interface IFoo { } `; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Interface(2, 1), @@ -84,7 +84,7 @@ interface IFoo { } interface IFoo where T1 : T2 { } `; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Interface(2, 1), diff --git a/test/syntaxes/interpolated-strings.test.syntax.ts b/test/syntaxes/interpolated-strings.test.syntax.ts index 096554025d..8215b6b806 100644 --- a/test/syntaxes/interpolated-strings.test.syntax.ts +++ b/test/syntaxes/interpolated-strings.test.syntax.ts @@ -19,7 +19,7 @@ public class Tester string test = $"hello {one} world {two}!"; }`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Modifiers.Public(2, 1), @@ -54,7 +54,7 @@ public class Tester string test = $"hello world!"; }`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Modifiers.Public(2, 1), @@ -82,7 +82,7 @@ public class Tester world!"; }`; - let tokens = 
TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Modifiers.Public(2, 1), @@ -111,7 +111,7 @@ public class Tester string test = $@"hello {one} world {two}!"; }`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Modifiers.Public(2, 1), @@ -147,7 +147,7 @@ public class Tester world {two}!"; }`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Modifiers.Public(2, 1), @@ -183,7 +183,7 @@ public class Tester world!"; }`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Modifiers.Public(2, 1), diff --git a/test/syntaxes/namespaces.test.syntax.ts b/test/syntaxes/namespaces.test.syntax.ts index cf4e46d6ba..7eaad1390c 100644 --- a/test/syntaxes/namespaces.test.syntax.ts +++ b/test/syntaxes/namespaces.test.syntax.ts @@ -17,7 +17,7 @@ describe("Grammar", () => { namespace TestNamespace { }`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Namespace(2, 1), @@ -32,7 +32,7 @@ namespace TestNamespace namespace Test.Namespace { }`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Namespace(2, 1), @@ -52,7 +52,7 @@ namespace TestNamespace } }`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Namespace(2, 1), @@ -84,7 +84,7 @@ namespace TestNamespace using three = UsingThree.Something; } }`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Using(2, 1), diff --git a/test/syntaxes/numeric-literals.test.syntax.ts 
b/test/syntaxes/numeric-literals.test.syntax.ts index d7b129e3de..6feb21dd2c 100644 --- a/test/syntaxes/numeric-literals.test.syntax.ts +++ b/test/syntaxes/numeric-literals.test.syntax.ts @@ -20,7 +20,7 @@ class C { } }`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.contain(Tokens.Puncuation.SquareBracket.Open(2, 1)); tokens.should.contain(Tokens.Type("Foo", 2, 2)); diff --git a/test/syntaxes/properties.test.syntax.ts b/test/syntaxes/properties.test.syntax.ts index 9d844e9b17..a4ea51a2ad 100644 --- a/test/syntaxes/properties.test.syntax.ts +++ b/test/syntaxes/properties.test.syntax.ts @@ -22,7 +22,7 @@ class Tester set { something = value; } } }`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Class(2, 1), @@ -53,7 +53,7 @@ class Tester { public IBooom Property { get { return null; } private set { something = value; } } }`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Class(2, 1), @@ -85,7 +85,7 @@ class Tester { IBooom Property {get; set;} }`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Class(2, 1), @@ -111,7 +111,7 @@ class Tester { public IBooom Property { get; set; } }`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Class(2, 1), @@ -138,7 +138,7 @@ class Tester { protected internal IBooom Property { get; set; } }`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Class(2, 1), @@ -170,7 +170,7 @@ class Tester set; } }`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ 
Tokens.Keywords.Class(2, 1), @@ -197,7 +197,7 @@ class Tester { public Dictionary[]> Property { get; set; } }`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Class(2, 1), @@ -235,7 +235,7 @@ class Tester public Dictionary[]> Property { get; } = new Dictionary[]>(); }`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Class(2, 1), @@ -288,7 +288,7 @@ public class Tester private bool prop2 => true; }`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Modifiers.Public(2, 1), diff --git a/test/syntaxes/structs.test.syntax.ts b/test/syntaxes/structs.test.syntax.ts index c385bbfa2f..4ef9128155 100644 --- a/test/syntaxes/structs.test.syntax.ts +++ b/test/syntaxes/structs.test.syntax.ts @@ -17,7 +17,7 @@ describe("Grammar", () => { struct S { } `; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Struct(2, 1), @@ -33,7 +33,7 @@ interface IFoo { } struct S : IFoo { } `; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Interface(2, 1), @@ -54,7 +54,7 @@ struct S : IFoo { } struct S { } `; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Struct(2, 1), @@ -69,7 +69,7 @@ struct S { } struct S where T1 : T2 { } `; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Struct(2, 1), diff --git a/test/syntaxes/using-directives.test.syntax.ts b/test/syntaxes/using-directives.test.syntax.ts index 9f71534686..d7942ad299 100644 --- a/test/syntaxes/using-directives.test.syntax.ts +++ 
b/test/syntaxes/using-directives.test.syntax.ts @@ -16,7 +16,7 @@ describe("Grammar", () => { const input = ` using System;`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Using(2, 1), @@ -29,7 +29,7 @@ using System;`; const input = ` using static System.Console;`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Using(2, 1), @@ -45,7 +45,7 @@ using static System.Console;`; const input = ` using S = System;`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Using(2, 1), @@ -60,7 +60,7 @@ using S = System;`; const input = ` using C = System.Console;`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Using(2, 1), @@ -77,7 +77,7 @@ using C = System.Console;`; const input = ` using IntList = System.Collections.Generic.List;`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Using(2, 1), @@ -103,7 +103,7 @@ using IntList = System.Collections.Generic.List;`; const input = ` using X = System.Collections.Generic.Dictionary>;`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Using(2, 1), @@ -142,7 +142,7 @@ using X = System.Collections.Generic.Dictionary/**/>/**/;//end`; - let tokens = TokenizerUtil.tokenize2(input); + let tokens = TokenizerUtil.tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Using(2, 1), diff --git a/test/syntaxes/utils/tokenizerUtil.ts b/test/syntaxes/utils/tokenizerUtil.ts index 9928808447..c183db6084 100644 --- a/test/syntaxes/utils/tokenizerUtil.ts +++ b/test/syntaxes/utils/tokenizerUtil.ts @@ -7,14 +7,9 @@ import { 
Tokenizer, Token } from './tokenizer'; export class TokenizerUtil { - private static _tokenizer: Tokenizer = new Tokenizer("syntaxes/csharp.json"); - private static _tokenizer2: Tokenizer = new Tokenizer("syntaxes/csharp2.json"); + private static _tokenizer: Tokenizer = new Tokenizer("syntaxes/csharp2.json"); public static tokenize(input: string, excludeTypes: boolean = true): Token[] { return TokenizerUtil._tokenizer.tokenize(input, excludeTypes); } - - public static tokenize2(input: string, excludeTypes: boolean = true): Token[] { - return TokenizerUtil._tokenizer2.tokenize(input, excludeTypes); - } } From 2c2a8cbbfda04e077fcedc6c8be1f7dde85e788b Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Wed, 28 Dec 2016 03:40:13 -0800 Subject: [PATCH 030/192] A bit more refactoring of tests --- test/syntaxes/attributes.test.syntax.ts | 23 +++++---- test/syntaxes/boolean-literals.test.syntax.ts | 37 ++++++++------ test/syntaxes/classes.test.syntax.ts | 17 +++---- test/syntaxes/comments.test.syntax.ts | 9 ++-- test/syntaxes/delegates.test.syntax.ts | 15 +++--- test/syntaxes/enums.test.syntax.ts | 17 +++---- test/syntaxes/events.test.syntax.ts | 17 +++---- test/syntaxes/extern-aliases.test.syntax.ts | 5 +- test/syntaxes/fields.test.syntax.ts | 15 +++--- test/syntaxes/indexers.test.syntax.ts | 7 ++- test/syntaxes/interfaces.test.syntax.ts | 17 +++---- .../interpolated-strings.test.syntax.ts | 19 +++---- test/syntaxes/namespaces.test.syntax.ts | 15 +++--- test/syntaxes/numeric-literals.test.syntax.ts | 26 ++++++---- test/syntaxes/properties.test.syntax.ts | 23 +++++---- test/syntaxes/structs.test.syntax.ts | 15 +++--- test/syntaxes/using-directives.test.syntax.ts | 17 +++---- test/syntaxes/utils/tokenizer.ts | 49 ++++++++----------- test/syntaxes/utils/tokenizerUtil.ts | 15 ------ 19 files changed, 159 insertions(+), 199 deletions(-) delete mode 100644 test/syntaxes/utils/tokenizerUtil.ts diff --git a/test/syntaxes/attributes.test.syntax.ts 
b/test/syntaxes/attributes.test.syntax.ts index b82e414260..e9c96cfb11 100644 --- a/test/syntaxes/attributes.test.syntax.ts +++ b/test/syntaxes/attributes.test.syntax.ts @@ -4,8 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { should } from 'chai'; -import { Tokens } from './utils/tokenizer'; -import { TokenizerUtil } from './utils/tokenizerUtil'; +import { tokenize, Tokens } from './utils/tokenizer'; describe("Grammar", () => { before(() => should()); @@ -16,7 +15,7 @@ describe("Grammar", () => { const input = ` [Foo]`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Puncuation.SquareBracket.Open(2, 1), @@ -29,7 +28,7 @@ describe("Grammar", () => { const input = ` [assembly: Foo]`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Puncuation.SquareBracket.Open(2, 1), @@ -44,7 +43,7 @@ describe("Grammar", () => { const input = ` [module: Foo, Bar]`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Puncuation.SquareBracket.Open(2, 1), @@ -61,7 +60,7 @@ describe("Grammar", () => { const input = ` [module: Foo(), Bar()]`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Puncuation.SquareBracket.Open(2, 1), @@ -82,7 +81,7 @@ describe("Grammar", () => { const input = ` [Foo(true)]`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Puncuation.SquareBracket.Open(2, 1), @@ -98,7 +97,7 @@ describe("Grammar", () => { const input = ` [Foo(true, 42)]`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Puncuation.SquareBracket.Open(2, 1), @@ -116,7 +115,7 @@ describe("Grammar", () => { const input = ` [Foo(true, 42, "text")]`; - let tokens = 
TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Puncuation.SquareBracket.Open(2, 1), @@ -138,7 +137,7 @@ describe("Grammar", () => { const input = ` [Foo(Bar = 42)]`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Puncuation.SquareBracket.Open(2, 1), @@ -156,7 +155,7 @@ describe("Grammar", () => { const input = ` [Foo(true, Bar = 42)]`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Puncuation.SquareBracket.Open(2, 1), @@ -176,7 +175,7 @@ describe("Grammar", () => { const input = ` [module: Foo(true, Bar = 42, Baz = "hello")]`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Puncuation.SquareBracket.Open(2, 1), diff --git a/test/syntaxes/boolean-literals.test.syntax.ts b/test/syntaxes/boolean-literals.test.syntax.ts index b9c9197d8b..fbf1ddd09c 100644 --- a/test/syntaxes/boolean-literals.test.syntax.ts +++ b/test/syntaxes/boolean-literals.test.syntax.ts @@ -4,49 +4,56 @@ *--------------------------------------------------------------------------------------------*/ import { should } from 'chai'; -import { Tokens } from './utils/tokenizer'; -import { TokenizerUtil } from './utils/tokenizerUtil'; +import { tokenize, Tokens } from './utils/tokenizer'; describe("Grammar", () => { before(() => should()); - describe.skip("Literals - boolean", () => { + describe("Literals - boolean", () => { it("true", () => { const input = ` class C { - method M() { - var x = true; - } + bool x = true; }`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Class(2, 1), Tokens.Identifiers.ClassName("C", 2, 7), Tokens.Puncuation.CurlyBrace.Open(2, 9), - Tokens.Literals.Boolean.True(4, 17), - Tokens.Puncuation.CurlyBrace.Close(6, 1)]); + + Tokens.Type("bool", 3, 5), + 
Tokens.Identifiers.FieldName("x", 3, 10), + Tokens.Operators.Assignment(3, 12), + Tokens.Literals.Boolean.True(3, 14), + Tokens.Puncuation.Semicolon(3, 18), + + Tokens.Puncuation.CurlyBrace.Close(4, 1)]); }); it("false", () => { const input = ` class C { - method M() { - var x = false; - } + bool x = false; }`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Class(2, 1), Tokens.Identifiers.ClassName("C", 2, 7), Tokens.Puncuation.CurlyBrace.Open(2, 9), - Tokens.Literals.Boolean.False(4, 17), - Tokens.Puncuation.CurlyBrace.Close(6, 1)]); + + Tokens.Type("bool", 3, 5), + Tokens.Identifiers.FieldName("x", 3, 10), + Tokens.Operators.Assignment(3, 12), + Tokens.Literals.Boolean.False(3, 14), + Tokens.Puncuation.Semicolon(3, 19), + + Tokens.Puncuation.CurlyBrace.Close(4, 1)]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/classes.test.syntax.ts b/test/syntaxes/classes.test.syntax.ts index 6271cc60d0..fb4312f2e7 100644 --- a/test/syntaxes/classes.test.syntax.ts +++ b/test/syntaxes/classes.test.syntax.ts @@ -4,8 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { should } from 'chai'; -import { Tokens } from './utils/tokenizer'; -import { TokenizerUtil } from './utils/tokenizerUtil'; +import { tokenize, Tokens } from './utils/tokenizer'; describe("Grammar", () => { before(() => should()); @@ -34,7 +33,7 @@ namespace TestNamespace abstract class DefaultAbstractClass { } }`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Namespace(2, 1), @@ -107,7 +106,7 @@ namespace TestNamespace { class Dictionary { } }`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Namespace(2, 1), @@ -131,7 +130,7 @@ namespace TestNamespace class PublicClass : Root.IInterface, Something.IInterfaceTwo { } 
class PublicClass : Dictionary>, IMap> { } }`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Namespace(2, 1), @@ -209,7 +208,7 @@ namespace TestNamespace { } }`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Namespace(2, 1), @@ -272,7 +271,7 @@ namespace TestNamespace } } }`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Namespace(2, 1), @@ -294,6 +293,4 @@ namespace TestNamespace Tokens.Puncuation.CurlyBrace.Close(11, 1)]); }); }); -}); - - +}); \ No newline at end of file diff --git a/test/syntaxes/comments.test.syntax.ts b/test/syntaxes/comments.test.syntax.ts index 96466026ce..047724b6d0 100644 --- a/test/syntaxes/comments.test.syntax.ts +++ b/test/syntaxes/comments.test.syntax.ts @@ -4,8 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { should } from 'chai'; -import { Tokens } from './utils/tokenizer'; -import { TokenizerUtil } from './utils/tokenizerUtil'; +import { tokenize, Tokens } from './utils/tokenizer'; describe("Grammar", () => { before(() => should()); @@ -16,7 +15,7 @@ describe("Grammar", () => { const input = ` // foo`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Comment.SingleLine.Start(2, 1), @@ -28,7 +27,7 @@ describe("Grammar", () => { const input = ` // foo`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Comment.LeadingWhitespace(" ", 2, 1), @@ -41,7 +40,7 @@ describe("Grammar", () => { const input = ` /* foo */`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Comment.MultiLine.Start(2, 1), diff --git a/test/syntaxes/delegates.test.syntax.ts 
b/test/syntaxes/delegates.test.syntax.ts index 98882479f5..c7472d22ac 100644 --- a/test/syntaxes/delegates.test.syntax.ts +++ b/test/syntaxes/delegates.test.syntax.ts @@ -4,8 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { should } from 'chai'; -import { Tokens } from './utils/tokenizer'; -import { TokenizerUtil } from './utils/tokenizerUtil'; +import { tokenize, Tokens } from './utils/tokenizer'; describe("Grammar", () => { before(() => should()); @@ -17,7 +16,7 @@ describe("Grammar", () => { delegate void D(); `; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Delegate(2, 1), @@ -34,7 +33,7 @@ delegate void D(); delegate TResult D(T arg1); `; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Delegate(2, 1), @@ -54,7 +53,7 @@ delegate void D() where T1 : T2; `; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Delegate(2, 1), @@ -75,7 +74,7 @@ delegate void D() delegate int D(ref string x, out int y, params object[] z); `; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Delegate(2, 1), @@ -99,6 +98,4 @@ delegate int D(ref string x, out int y, params object[] z); Tokens.Puncuation.Semicolon(2, 59)]); }); }); -}); - - +}); \ No newline at end of file diff --git a/test/syntaxes/enums.test.syntax.ts b/test/syntaxes/enums.test.syntax.ts index b24681d777..2b168b4f98 100644 --- a/test/syntaxes/enums.test.syntax.ts +++ b/test/syntaxes/enums.test.syntax.ts @@ -4,8 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { should } from 'chai'; -import { Tokens } from './utils/tokenizer'; -import { TokenizerUtil } from './utils/tokenizerUtil'; +import { tokenize, Tokens } 
from './utils/tokenizer'; describe("Grammar", () => { before(() => should()); @@ -17,7 +16,7 @@ describe("Grammar", () => { enum E { } `; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Enum(2, 1), @@ -32,7 +31,7 @@ enum E { } enum E : byte { } `; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Enum(2, 1), @@ -49,7 +48,7 @@ enum E : byte { } enum E { M1 } `; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Enum(2, 1), @@ -65,7 +64,7 @@ enum E { M1 } enum Color { Red, Green, Blue } `; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Enum(2, 1), @@ -90,7 +89,7 @@ enum E } `; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Enum(2, 1), @@ -106,6 +105,4 @@ enum E Tokens.Puncuation.CurlyBrace.Close(7, 1)]); }); }); -}); - - +}); \ No newline at end of file diff --git a/test/syntaxes/events.test.syntax.ts b/test/syntaxes/events.test.syntax.ts index d22becdb72..d09e030489 100644 --- a/test/syntaxes/events.test.syntax.ts +++ b/test/syntaxes/events.test.syntax.ts @@ -4,8 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { should } from 'chai'; -import { Tokens } from './utils/tokenizer'; -import { TokenizerUtil } from './utils/tokenizerUtil'; +import { tokenize, Tokens } from './utils/tokenizer'; describe("Grammar", () => { before(() => should()); @@ -19,7 +18,7 @@ public class Tester public event Type Event; }`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Modifiers.Public(2, 1), @@ -44,7 +43,7 @@ public class Tester protected internal event Type Event; }`; - let tokens = 
TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Modifiers.Public(2, 1), @@ -70,7 +69,7 @@ public class Tester public event Type Event1, Event2; }`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Modifiers.Public(2, 1), @@ -97,7 +96,7 @@ public class Tester public event EventHandler, Dictionary> Event; }`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Modifiers.Public(2, 1), @@ -139,7 +138,7 @@ public class Tester } }`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Modifiers.Public(2, 1), @@ -163,6 +162,4 @@ public class Tester Tokens.Puncuation.CurlyBrace.Close(9, 1)]); }); }); -}); - - +}); \ No newline at end of file diff --git a/test/syntaxes/extern-aliases.test.syntax.ts b/test/syntaxes/extern-aliases.test.syntax.ts index c534f6f654..064d3eb177 100644 --- a/test/syntaxes/extern-aliases.test.syntax.ts +++ b/test/syntaxes/extern-aliases.test.syntax.ts @@ -4,8 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { should } from 'chai'; -import { Tokens } from './utils/tokenizer'; -import { TokenizerUtil } from './utils/tokenizerUtil'; +import { tokenize, Tokens } from './utils/tokenizer'; describe("Grammar", () => { before(() => should()); @@ -17,7 +16,7 @@ describe("Grammar", () => { extern alias X; extern alias Y;`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Extern(2, 1), diff --git a/test/syntaxes/fields.test.syntax.ts b/test/syntaxes/fields.test.syntax.ts index 2ad57bc316..2698c4fab6 100644 --- a/test/syntaxes/fields.test.syntax.ts +++ b/test/syntaxes/fields.test.syntax.ts @@ -4,8 +4,7 @@ 
*--------------------------------------------------------------------------------------------*/ import { should } from 'chai'; -import { Tokens } from './utils/tokenizer'; -import { TokenizerUtil } from './utils/tokenizerUtil'; +import { tokenize, Tokens } from './utils/tokenizer'; describe("Grammar", () => { before(() => should()); @@ -21,7 +20,7 @@ public class Tester private List field123; }`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Modifiers.Public(2, 1), @@ -55,7 +54,7 @@ public class Tester private Dictionary< List, Dictionary> _field; }`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Modifiers.Public(2, 1), @@ -95,7 +94,7 @@ public class Tester string _field3; }`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Modifiers.Public(2, 1), @@ -131,7 +130,7 @@ public class Tester string[] field123; }`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Modifiers.Public(2, 1), @@ -161,7 +160,7 @@ public class Tester const bool field = true; }`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Modifiers.Public(2, 1), @@ -196,7 +195,7 @@ public class Tester int x = 19, y = 23, z = 42; }`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Modifiers.Public(2, 1), diff --git a/test/syntaxes/indexers.test.syntax.ts b/test/syntaxes/indexers.test.syntax.ts index ff1553dbc9..2c1c60f4b3 100644 --- a/test/syntaxes/indexers.test.syntax.ts +++ b/test/syntaxes/indexers.test.syntax.ts @@ -4,8 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { should } from 'chai'; -import { Tokens } 
from './utils/tokenizer'; -import { TokenizerUtil } from './utils/tokenizerUtil'; +import { tokenize, Tokens } from './utils/tokenizer'; describe("Grammar", () => { before(() => should()); @@ -21,7 +20,7 @@ class Tester get { return index.ToString(); } } }`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Class(2, 1), @@ -45,4 +44,4 @@ class Tester Tokens.Puncuation.CurlyBrace.Close(8, 1)]); }); }); -}); +}); \ No newline at end of file diff --git a/test/syntaxes/interfaces.test.syntax.ts b/test/syntaxes/interfaces.test.syntax.ts index 5e873baa8c..776a9b1c2e 100644 --- a/test/syntaxes/interfaces.test.syntax.ts +++ b/test/syntaxes/interfaces.test.syntax.ts @@ -4,8 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { should } from 'chai'; -import { Tokens } from './utils/tokenizer'; -import { TokenizerUtil } from './utils/tokenizerUtil'; +import { tokenize, Tokens } from './utils/tokenizer'; describe("Grammar", () => { before(() => should()); @@ -17,7 +16,7 @@ describe("Grammar", () => { interface IFoo { } `; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Interface(2, 1), @@ -33,7 +32,7 @@ interface IFoo { } interface IBar : IFoo { } `; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Interface(2, 1), @@ -54,7 +53,7 @@ interface IBar : IFoo { } interface IFoo { } `; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Interface(2, 1), @@ -69,7 +68,7 @@ interface IFoo { } interface IFoo { } `; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Interface(2, 1), @@ -84,7 +83,7 @@ interface IFoo { } interface IFoo where T1 : T2 { } `; - let tokens = 
TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Interface(2, 1), @@ -97,6 +96,4 @@ interface IFoo where T1 : T2 { } Tokens.Puncuation.CurlyBrace.Close(2, 40)]); }); }); -}); - - +}); \ No newline at end of file diff --git a/test/syntaxes/interpolated-strings.test.syntax.ts b/test/syntaxes/interpolated-strings.test.syntax.ts index 8215b6b806..de7c1ce279 100644 --- a/test/syntaxes/interpolated-strings.test.syntax.ts +++ b/test/syntaxes/interpolated-strings.test.syntax.ts @@ -4,8 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { should } from 'chai'; -import { Tokens } from './utils/tokenizer'; -import { TokenizerUtil } from './utils/tokenizerUtil'; +import { tokenize, Tokens } from './utils/tokenizer'; describe("Grammar", () => { before(() => should()); @@ -19,7 +18,7 @@ public class Tester string test = $"hello {one} world {two}!"; }`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Modifiers.Public(2, 1), @@ -54,7 +53,7 @@ public class Tester string test = $"hello world!"; }`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Modifiers.Public(2, 1), @@ -82,7 +81,7 @@ public class Tester world!"; }`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Modifiers.Public(2, 1), @@ -111,7 +110,7 @@ public class Tester string test = $@"hello {one} world {two}!"; }`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Modifiers.Public(2, 1), @@ -147,7 +146,7 @@ public class Tester world {two}!"; }`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Modifiers.Public(2, 1), @@ -183,7 +182,7 @@ public class 
Tester world!"; }`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Modifiers.Public(2, 1), @@ -203,6 +202,4 @@ public class Tester Tokens.Puncuation.CurlyBrace.Close(6, 1)]); }); }); -}); - - +}); \ No newline at end of file diff --git a/test/syntaxes/namespaces.test.syntax.ts b/test/syntaxes/namespaces.test.syntax.ts index 7eaad1390c..9d47090362 100644 --- a/test/syntaxes/namespaces.test.syntax.ts +++ b/test/syntaxes/namespaces.test.syntax.ts @@ -4,8 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { should } from 'chai'; -import { Tokens } from './utils/tokenizer'; -import { TokenizerUtil } from './utils/tokenizerUtil'; +import { tokenize, Tokens } from './utils/tokenizer'; describe("Grammar", () => { before(() => should()); @@ -17,7 +16,7 @@ describe("Grammar", () => { namespace TestNamespace { }`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Namespace(2, 1), @@ -32,7 +31,7 @@ namespace TestNamespace namespace Test.Namespace { }`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Namespace(2, 1), @@ -52,7 +51,7 @@ namespace TestNamespace } }`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Namespace(2, 1), @@ -84,7 +83,7 @@ namespace TestNamespace using three = UsingThree.Something; } }`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Using(2, 1), @@ -135,6 +134,4 @@ namespace TestNamespace Tokens.Puncuation.CurlyBrace.Close(15, 1)]); }); }); -}); - - +}); \ No newline at end of file diff --git a/test/syntaxes/numeric-literals.test.syntax.ts b/test/syntaxes/numeric-literals.test.syntax.ts index 6feb21dd2c..86ca2a55f8 100644 --- 
a/test/syntaxes/numeric-literals.test.syntax.ts +++ b/test/syntaxes/numeric-literals.test.syntax.ts @@ -4,27 +4,33 @@ *--------------------------------------------------------------------------------------------*/ import { should } from 'chai'; -import { Tokens } from './utils/tokenizer'; -import { TokenizerUtil } from './utils/tokenizerUtil'; +import { tokenize, Tokens } from './utils/tokenizer'; describe("Grammar", () => { before(() => should()); - describe.skip("Literals - numeric", () => { + describe("Literals - numeric", () => { it("decimal zero", () => { const input = ` class C { - method M() { - var x = 0; - } + int x = 0; }`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); - tokens.should.contain(Tokens.Puncuation.SquareBracket.Open(2, 1)); - tokens.should.contain(Tokens.Type("Foo", 2, 2)); - tokens.should.contain(Tokens.Puncuation.SquareBracket.Close(2, 5)); + tokens.should.deep.equal([ + Tokens.Keywords.Class(2, 1), + Tokens.Identifiers.ClassName("C", 2, 7), + Tokens.Puncuation.CurlyBrace.Open(2, 9), + + Tokens.Type("int", 3, 5), + Tokens.Identifiers.FieldName("x", 3, 9), + Tokens.Operators.Assignment(3, 11), + Tokens.Literals.Numeric.Decimal("0", 3, 13), + Tokens.Puncuation.Semicolon(3, 14), + + Tokens.Puncuation.CurlyBrace.Close(4, 1)]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/properties.test.syntax.ts b/test/syntaxes/properties.test.syntax.ts index a4ea51a2ad..6fd6dda173 100644 --- a/test/syntaxes/properties.test.syntax.ts +++ b/test/syntaxes/properties.test.syntax.ts @@ -4,8 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { should } from 'chai'; -import { Tokens } from './utils/tokenizer'; -import { TokenizerUtil } from './utils/tokenizerUtil'; +import { tokenize, Tokens } from './utils/tokenizer'; describe("Grammar", () => { before(() => should()); @@ -22,7 +21,7 @@ class Tester set { something = value; } } }`; - let tokens = 
TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Class(2, 1), @@ -53,7 +52,7 @@ class Tester { public IBooom Property { get { return null; } private set { something = value; } } }`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Class(2, 1), @@ -85,7 +84,7 @@ class Tester { IBooom Property {get; set;} }`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Class(2, 1), @@ -111,7 +110,7 @@ class Tester { public IBooom Property { get; set; } }`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Class(2, 1), @@ -138,7 +137,7 @@ class Tester { protected internal IBooom Property { get; set; } }`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Class(2, 1), @@ -170,7 +169,7 @@ class Tester set; } }`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Class(2, 1), @@ -197,7 +196,7 @@ class Tester { public Dictionary[]> Property { get; set; } }`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Class(2, 1), @@ -235,7 +234,7 @@ class Tester public Dictionary[]> Property { get; } = new Dictionary[]>(); }`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Class(2, 1), @@ -288,7 +287,7 @@ public class Tester private bool prop2 => true; }`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Modifiers.Public(2, 1), @@ -315,4 +314,4 @@ public class Tester Tokens.Puncuation.CurlyBrace.Close(6, 1)]); }); }); -}); +}); \ No newline at end of file diff --git 
a/test/syntaxes/structs.test.syntax.ts b/test/syntaxes/structs.test.syntax.ts index 4ef9128155..86306f00cb 100644 --- a/test/syntaxes/structs.test.syntax.ts +++ b/test/syntaxes/structs.test.syntax.ts @@ -4,8 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { should } from 'chai'; -import { Tokens } from './utils/tokenizer'; -import { TokenizerUtil } from './utils/tokenizerUtil'; +import { tokenize, Tokens } from './utils/tokenizer'; describe("Grammar", () => { before(() => should()); @@ -17,7 +16,7 @@ describe("Grammar", () => { struct S { } `; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Struct(2, 1), @@ -33,7 +32,7 @@ interface IFoo { } struct S : IFoo { } `; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Interface(2, 1), @@ -54,7 +53,7 @@ struct S : IFoo { } struct S { } `; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Struct(2, 1), @@ -69,7 +68,7 @@ struct S { } struct S where T1 : T2 { } `; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Struct(2, 1), @@ -82,6 +81,4 @@ struct S where T1 : T2 { } Tokens.Puncuation.CurlyBrace.Close(2, 34)]); }); }); -}); - - +}); \ No newline at end of file diff --git a/test/syntaxes/using-directives.test.syntax.ts b/test/syntaxes/using-directives.test.syntax.ts index d7942ad299..3468de01a8 100644 --- a/test/syntaxes/using-directives.test.syntax.ts +++ b/test/syntaxes/using-directives.test.syntax.ts @@ -4,8 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { should } from 'chai'; -import { Tokens } from './utils/tokenizer'; -import { TokenizerUtil } from './utils/tokenizerUtil'; +import { tokenize, Tokens } from 
'./utils/tokenizer'; describe("Grammar", () => { before(() => should()); @@ -16,7 +15,7 @@ describe("Grammar", () => { const input = ` using System;`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Using(2, 1), @@ -29,7 +28,7 @@ using System;`; const input = ` using static System.Console;`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Using(2, 1), @@ -45,7 +44,7 @@ using static System.Console;`; const input = ` using S = System;`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Using(2, 1), @@ -60,7 +59,7 @@ using S = System;`; const input = ` using C = System.Console;`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Using(2, 1), @@ -77,7 +76,7 @@ using C = System.Console;`; const input = ` using IntList = System.Collections.Generic.List;`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Using(2, 1), @@ -103,7 +102,7 @@ using IntList = System.Collections.Generic.List;`; const input = ` using X = System.Collections.Generic.Dictionary>;`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Using(2, 1), @@ -142,7 +141,7 @@ using X = System.Collections.Generic.Dictionary/**/>/**/;//end`; - let tokens = TokenizerUtil.tokenize(input); + let tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Using(2, 1), diff --git a/test/syntaxes/utils/tokenizer.ts b/test/syntaxes/utils/tokenizer.ts index a693cf4d90..70b334e2ca 100644 --- a/test/syntaxes/utils/tokenizer.ts +++ b/test/syntaxes/utils/tokenizer.ts @@ -3,46 +3,39 @@ * Licensed under the MIT License. See License.txt in the project root for license information. 
*--------------------------------------------------------------------------------------------*/ -import { ITokenizeLineResult, Registry, IGrammar, StackElement } from 'vscode-textmate'; +import { ITokenizeLineResult, Registry, StackElement } from 'vscode-textmate'; -export class Tokenizer { - private _registry: Registry; - private _grammar: IGrammar; +const registry = new Registry(); +const grammar = registry.loadGrammarFromPathSync('syntaxes/csharp2.json'); +const excludedTypes = ['source.cs', 'meta.interpolation.cs', 'meta.type.parameters.cs'] - private static readonly _excludedTypes: string[] = ['source.cs', 'meta.interpolation.cs', 'meta.type.parameters.cs']; +export function tokenize(input: string, excludeTypes: boolean = true): Token[] { + let tokens: Token[] = []; - constructor(grammarFilePath: string) { - this._grammar = new Registry().loadGrammarFromPathSync(grammarFilePath); - } - - public tokenize(input: string, excludeTypes?: boolean): Token[] { - let tokens: Token[] = []; - - // ensure consistent line-endings irrelevant of OS - input = input.replace('\r\n', '\n'); + // ensure consistent line-endings irrelevant of OS + input = input.replace('\r\n', '\n'); - let previousStack: StackElement = null; + let previousStack: StackElement = null; - const lines: string[] = input.split('\n'); + const lines = input.split('\n'); - for (let lineIndex = 0; lineIndex < lines.length; lineIndex++) { - const line = lines[lineIndex]; + for (let lineIndex = 0; lineIndex < lines.length; lineIndex++) { + const line = lines[lineIndex]; - let result: ITokenizeLineResult = this._grammar.tokenizeLine(line, previousStack); - previousStack = result.ruleStack; + let lineResult = grammar.tokenizeLine(line, previousStack); + previousStack = lineResult.ruleStack; - for (const token of result.tokens) { - const text = line.substring(token.startIndex, token.endIndex); - const type: string = token.scopes[token.scopes.length - 1]; + for (const token of lineResult.tokens) { + const text = 
line.substring(token.startIndex, token.endIndex); + const type = token.scopes[token.scopes.length - 1]; - if (excludeTypes === false || Tokenizer._excludedTypes.indexOf(type) < 0) { - tokens.push(new Token(text, type, lineIndex + 1, token.startIndex + 1)); - } + if (excludeTypes === false || excludedTypes.indexOf(type) < 0) { + tokens.push(new Token(text, type, lineIndex + 1, token.startIndex + 1)); } } - - return tokens; } + + return tokens; } export class Token { diff --git a/test/syntaxes/utils/tokenizerUtil.ts b/test/syntaxes/utils/tokenizerUtil.ts deleted file mode 100644 index c183db6084..0000000000 --- a/test/syntaxes/utils/tokenizerUtil.ts +++ /dev/null @@ -1,15 +0,0 @@ -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ - -import { Tokenizer, Token } from './tokenizer'; - -export class TokenizerUtil -{ - private static _tokenizer: Tokenizer = new Tokenizer("syntaxes/csharp2.json"); - - public static tokenize(input: string, excludeTypes: boolean = true): Token[] { - return TokenizerUtil._tokenizer.tokenize(input, excludeTypes); - } -} From fc096de1084c3d92b1f56f84d73eee51579662f3 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Wed, 28 Dec 2016 03:44:42 -0800 Subject: [PATCH 031/192] Add a few more numeric literal tests --- syntaxes/csharp2.json | 10 +-- test/syntaxes/numeric-literals.test.syntax.ts | 69 +++++++++++++++++++ 2 files changed, 74 insertions(+), 5 deletions(-) diff --git a/syntaxes/csharp2.json b/syntaxes/csharp2.json index a1ddfe0525..29945a9d5d 100644 --- a/syntaxes/csharp2.json +++ b/syntaxes/csharp2.json @@ -1243,11 +1243,6 @@ "name": "constant.numeric.binary.cs", "match": "\\b0(b|B)[01_]+(U|u|L|l|UL|Ul|uL|ul|LU|Lu|lU|lu)?\\b" }, - { - 
"comment": "C# grammar: decimal-digits integer-type-suffix[opt]", - "name": "constant.numeric.decimal.cs", - "match": "\\b[0-9_]+(U|u|L|l|UL|Ul|uL|ul|LU|Lu|lU|lu)?\\b" - }, { "comment": "C# grammar: decimal-digits . decimal-digits exponent-part[opt] real-type-suffix[opt] OR . decimal-digits exponent-part[opt] real-type-suffix[opt]", "name": "constant.numeric.decimal.cs", @@ -1262,6 +1257,11 @@ "comment": "C# grammar: decimal-digits real-type-suffix", "name": "constant.numeric.decimal.cs", "match": "\\b[0-9_]+(F|f|D|d|M|m)" + }, + { + "comment": "C# grammar: decimal-digits integer-type-suffix[opt]", + "name": "constant.numeric.decimal.cs", + "match": "\\b[0-9_]+(U|u|L|l|UL|Ul|uL|ul|LU|Lu|lU|lu)?\\b" } ] }, diff --git a/test/syntaxes/numeric-literals.test.syntax.ts b/test/syntaxes/numeric-literals.test.syntax.ts index 86ca2a55f8..5e9056c890 100644 --- a/test/syntaxes/numeric-literals.test.syntax.ts +++ b/test/syntaxes/numeric-literals.test.syntax.ts @@ -32,5 +32,74 @@ class C { Tokens.Puncuation.CurlyBrace.Close(4, 1)]); }); + + it("hexadecimal zero", () => { + + const input = ` +class C { + int x = 0x0; +}`; + + let tokens = tokenize(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Class(2, 1), + Tokens.Identifiers.ClassName("C", 2, 7), + Tokens.Puncuation.CurlyBrace.Open(2, 9), + + Tokens.Type("int", 3, 5), + Tokens.Identifiers.FieldName("x", 3, 9), + Tokens.Operators.Assignment(3, 11), + Tokens.Literals.Numeric.Hexadecimal("0x0", 3, 13), + Tokens.Puncuation.Semicolon(3, 16), + + Tokens.Puncuation.CurlyBrace.Close(4, 1)]); + }); + + it("binary zero", () => { + + const input = ` +class C { + int x = 0b0; +}`; + + let tokens = tokenize(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Class(2, 1), + Tokens.Identifiers.ClassName("C", 2, 7), + Tokens.Puncuation.CurlyBrace.Open(2, 9), + + Tokens.Type("int", 3, 5), + Tokens.Identifiers.FieldName("x", 3, 9), + Tokens.Operators.Assignment(3, 11), + Tokens.Literals.Numeric.Binary("0b0", 3, 13), + 
Tokens.Puncuation.Semicolon(3, 16), + + Tokens.Puncuation.CurlyBrace.Close(4, 1)]); + }); + + it("floating-point zero", () => { + + const input = ` +class C { + float x = 0.0; +}`; + + let tokens = tokenize(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Class(2, 1), + Tokens.Identifiers.ClassName("C", 2, 7), + Tokens.Puncuation.CurlyBrace.Open(2, 9), + + Tokens.Type("float", 3, 5), + Tokens.Identifiers.FieldName("x", 3, 11), + Tokens.Operators.Assignment(3, 13), + Tokens.Literals.Numeric.Decimal("0.0", 3, 15), + Tokens.Puncuation.Semicolon(3, 18), + + Tokens.Puncuation.CurlyBrace.Close(4, 1)]); + }); }); }); \ No newline at end of file From 81ba9822063c3b2ea90d71c6c6bc8c8690366565 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Wed, 28 Dec 2016 04:09:02 -0800 Subject: [PATCH 032/192] Refine interpolated strings and escaped braces --- syntaxes/csharp2.json | 5 +- .../interpolated-strings.test.syntax.ts | 91 +++++++++++++++++++ 2 files changed, 95 insertions(+), 1 deletion(-) diff --git a/syntaxes/csharp2.json b/syntaxes/csharp2.json index 29945a9d5d..f5f8c30278 100644 --- a/syntaxes/csharp2.json +++ b/syntaxes/csharp2.json @@ -1181,9 +1181,12 @@ }, "interpolation": { "name": "meta.interpolation.cs", - "begin": "(?<=[^\\{])(?:\\{\\{)*(\\{)(?=[^\\{])", + "begin": "(?<=[^\\{])((?:\\{\\{)*)(\\{)(?=[^\\{])", "beginCaptures": { "1": { + "name": "string.quoted.double.cs" + }, + "2": { "name": "punctuation.definition.interpolation.begin.cs" } }, diff --git a/test/syntaxes/interpolated-strings.test.syntax.ts b/test/syntaxes/interpolated-strings.test.syntax.ts index de7c1ce279..498fef3217 100644 --- a/test/syntaxes/interpolated-strings.test.syntax.ts +++ b/test/syntaxes/interpolated-strings.test.syntax.ts @@ -72,6 +72,97 @@ public class Tester Tokens.Puncuation.CurlyBrace.Close(5, 1)]); }); + it("no interpolations due to escaped braces", () => { + + const input = ` +public class Tester +{ + string test = $"hello {{one}} world {{two}}!"; +}`; + + let tokens = 
tokenize(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Modifiers.Public(2, 1), + Tokens.Keywords.Class(2, 8), + Tokens.Identifiers.ClassName("Tester", 2, 14), + Tokens.Puncuation.CurlyBrace.Open(3, 1), + + Tokens.Type("string", 4, 5), + Tokens.Identifiers.FieldName("test", 4, 12), + Tokens.Operators.Assignment(4, 17), + Tokens.Puncuation.InterpolatedString.Begin(4, 19), + Tokens.Literals.String("hello {{one}} world {{two}}!", 4, 21), + Tokens.Puncuation.InterpolatedString.End(4, 49), + Tokens.Puncuation.Semicolon(4, 50), + + Tokens.Puncuation.CurlyBrace.Close(5, 1)]); + }); + + it("two interpolations with escaped braces", () => { + + const input = ` +public class Tester +{ + string test = $"hello {{{one}}} world {{{two}}}!"; +}`; + + let tokens = tokenize(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Modifiers.Public(2, 1), + Tokens.Keywords.Class(2, 8), + Tokens.Identifiers.ClassName("Tester", 2, 14), + Tokens.Puncuation.CurlyBrace.Open(3, 1), + + Tokens.Type("string", 4, 5), + Tokens.Identifiers.FieldName("test", 4, 12), + Tokens.Operators.Assignment(4, 17), + Tokens.Puncuation.InterpolatedString.Begin(4, 19), + Tokens.Literals.String("hello ", 4, 21), + Tokens.Literals.String("{{", 4, 27), + Tokens.Puncuation.Interpolation.Begin(4, 29), + Tokens.Variables.ReadWrite("one", 4, 30), + Tokens.Puncuation.Interpolation.End(4, 33), + Tokens.Literals.String("}} world ", 4, 34), + Tokens.Literals.String("{{", 4, 43), + Tokens.Puncuation.Interpolation.Begin(4, 45), + Tokens.Variables.ReadWrite("two", 4, 46), + Tokens.Puncuation.Interpolation.End(4, 49), + Tokens.Literals.String("}}!", 4, 50), + Tokens.Puncuation.InterpolatedString.End(4, 53), + Tokens.Puncuation.Semicolon(4, 54), + + Tokens.Puncuation.CurlyBrace.Close(5, 1)]); + }); + + it("no interpolations due to double-escaped braces", () => { + + const input = ` +public class Tester +{ + string test = $"hello {{{{one}}}} world {{{{two}}}}!"; +}`; + + let tokens = tokenize(input); + + 
tokens.should.deep.equal([ + Tokens.Keywords.Modifiers.Public(2, 1), + Tokens.Keywords.Class(2, 8), + Tokens.Identifiers.ClassName("Tester", 2, 14), + Tokens.Puncuation.CurlyBrace.Open(3, 1), + + Tokens.Type("string", 4, 5), + Tokens.Identifiers.FieldName("test", 4, 12), + Tokens.Operators.Assignment(4, 17), + Tokens.Puncuation.InterpolatedString.Begin(4, 19), + Tokens.Literals.String("hello {{{{one}}}} world {{{{two}}}}!", 4, 21), + Tokens.Puncuation.InterpolatedString.End(4, 57), + Tokens.Puncuation.Semicolon(4, 58), + + Tokens.Puncuation.CurlyBrace.Close(5, 1)]); + }); + it("break across two lines (non-verbatim)", () => { const input = ` From 7a2b7fc1427e70a6752935d9ce60bd5b8ffbe5ad Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Wed, 28 Dec 2016 04:12:30 -0800 Subject: [PATCH 033/192] Update TODO list for declarations --- syntaxes/syntax.md | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/syntaxes/syntax.md b/syntaxes/syntax.md index f8a39c2764..49d3e1059f 100644 --- a/syntaxes/syntax.md +++ b/syntaxes/syntax.md @@ -1,4 +1,15 @@ -Important regular expressions: +## TODO List: + +* Declaratiosn: + * Explicitly-implemented interface members + * Constructor declarations + * Destructor declarations + * Method declarations + * Operator declarations + * Conversion operator declarations + * Interface members + +## Important regular expressions: #### Identifier From f6e72122e41304f5bfa6467ba582f84754868332 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Wed, 28 Dec 2016 04:35:37 -0800 Subject: [PATCH 034/192] Refine type name regular expressions a bit more --- syntaxes/csharp2.json | 16 ++++++++-------- syntaxes/syntax.md | 27 ++++++++++++++++++--------- 2 files changed, 26 insertions(+), 17 deletions(-) diff --git a/syntaxes/csharp2.json b/syntaxes/csharp2.json index f5f8c30278..c06c4032c3 100644 --- a/syntaxes/csharp2.json +++ b/syntaxes/csharp2.json @@ -733,14 +733,14 @@ ] }, "field-declaration": { - "begin": 
"(?=\\b(?(?:(?:new|public|protected|internal|private|static|readonly|volatile|const)\\s+)*)\\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)\\s*(?!=>)(?:;|=))", + "begin": "(?=\\b(?(?:(?:new|public|protected|internal|private|static|readonly|volatile|const)\\s+)*)\\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)\\s*(?!=>)(?:;|=))", "end": "(?=;)", "patterns": [ { "include": "#comment" }, { - "begin": "\\b(?(?:(?:new|public|protected|internal|private|static|readonly|volatile|const)\\s+)*)\\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)\\s*(?!=>)(?=;|=)", + "begin": "\\b(?(?:(?:new|public|protected|internal|private|static|readonly|volatile|const)\\s+)*)\\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)\\s*(?!=>)(?=;|=)", "beginCaptures": { "1": { "patterns": [ @@ -785,14 +785,14 @@ ] }, "property-declaration": { - "begin": "(?!.*\\b(?:class|interface|struct|enum|event)\\b)(?=\\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\s+)*)\\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)\\s*(?:\\{|=>|$))", + "begin": 
"(?!.*\\b(?:class|interface|struct|enum|event)\\b)(?=\\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\s+)*)\\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)\\s*(?:\\{|=>|$))", "end": "(?=\\}|;)", "patterns": [ { "include": "#comment" }, { - "match": "\\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\s+)*)\\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)\\s*(?=\\{|=>|$)", + "match": "\\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\s+)*)\\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)\\s*(?=\\{|=>|$)", "captures": { "1": { "patterns": [ @@ -928,14 +928,14 @@ ] }, "indexer-declaration": { - "begin": "(?=\\b(?(?:(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\\s+)*)\\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?)\\s+(?this)\\s*(?:\\[))", + "begin": "(?=\\b(?(?:(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\\s+)*)\\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)?)\\s+(?this)\\s*(?:\\[))", "end": "(?=\\}|;)", "patterns": [ { "include": "#comment" }, { - "match": 
"\\b(?(?:(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\\s+)*)\\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?)\\s+(?this)\\s*(?=\\[)", + "match": "\\b(?(?:(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\\s+)*)\\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)?)\\s+(?this)\\s*(?=\\[)", "captures": { "1": { "patterns": [ @@ -982,14 +982,14 @@ ] }, "event-declaration": { - "begin": "(?=\\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\s+)*)\\s*\\b(?event)\\b\\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*(?:\\s*,\\s*[_$[:alpha:]][_$[:alnum:]]*)*)\\s*(?:\\{|;|$))", + "begin": "(?=\\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\s+)*)\\s*\\b(?event)\\b\\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*(?:\\s*,\\s*[_$[:alpha:]][_$[:alnum:]]*)*)\\s*(?:\\{|;|$))", "end": "(?=\\}|;)", "patterns": [ { "include": "#comment" }, { - "match": "\\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\s+)*)\\s*\\b(?event)\\b\\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*(?:\\s*,\\s*[_$[:alpha:]][_$[:alnum:]]*)*)\\s*(?=\\{|;|$)", + "match": 
"\\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\s+)*)\\s*\\b(?event)\\b\\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*(?:\\s*,\\s*[_$[:alpha:]][_$[:alnum:]]*)*)\\s*(?=\\{|;|$)", "captures": { "1": { "patterns": [ diff --git a/syntaxes/syntax.md b/syntaxes/syntax.md index 49d3e1059f..efa821c121 100644 --- a/syntaxes/syntax.md +++ b/syntaxes/syntax.md @@ -43,7 +43,7 @@ #### Type name -* Expression: `(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?)` +* Expression: `(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)?)` * Matches: `System.Collections.Generic.Dictionary, System.List>>` #### Field declaratiosn @@ -51,20 +51,20 @@ Note that fields can have multiple declarators with initializers. Our strategy is to match up to the end of the field name. Further field names are matched by looking for identifiers, #punctuation-comma, and #variable-initializer. 
-* Expression: `(?=\b(?(?:(?:new|public|protected|internal|private|static|readonly|volatile|const)\s+)*)\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?)\s+(?[_$[:alpha:]][_$[:alnum:]]*)\s*(?!=>)(?:;|=))` +* Expression: `(?=\b(?(?:(?:new|public|protected|internal|private|static|readonly|volatile|const)\s+)*)\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)?)\s+(?[_$[:alpha:]][_$[:alnum:]]*)\s*(?!=>)(?:;|=))` * Break down: * Storage modifiers: `\b(?(?:(?:new|public|protected|internal|private|static|readonly|volatile|const)\s+)*)` - * Type name: `\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?)` + * Type name: `\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)?)` * First field name: `\s+(?[_$[:alpha:]][_$[:alnum:]]*)*)` * End: `\s*(?!=>)(?:;|=)` #### Event declarations -* Expression: `(?=\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\s+)*)\s*\b(?event)\b\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?)\s+(?[_$[:alpha:]][_$[:alnum:]]*(?:\s*,\s*[_$[:alpha:]][_$[:alnum:]]*)*)\s*(?:\{|;|$))` +* Expression: `(?=\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\s+)*)\s*\b(?event)\b\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)?)\s+(?[_$[:alpha:]][_$[:alnum:]]*(?:\s*,\s*[_$[:alpha:]][_$[:alnum:]]*)*)\s*(?:\{|;|$))` * Break down: * Storage modifiers: 
`\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\s+)*)` * Event keyword: `\s*\b(?event)\b` - * Type name: `\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?)` + * Type name: `\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)?)` * Event name(s): `\s+(?[_$[:alpha:]][_$[:alnum:]]*(?:\s*,\s*[_$[:alpha:]][_$[:alnum:]]*)*)` * End: `\s*(?=\{|;|$)` @@ -73,19 +73,28 @@ Further field names are matched by looking for identifiers, #punctuation-comma, Note that properties can easily match other declarations unintentially. For example, "public class C {" looks a lot like the start of a property if you consider that regular expressions don't know that "class" is a keyword. To handle this situation, we must use look ahead. -* Expression: `(?!.*\b(?:class|interface|struct|enum|event)\b)(?=\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\s+)*)\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?)\s+(?[_$[:alpha:]][_$[:alnum:]]*)\s*(?:\{|=>|$))` +* Expression: `(?!.*\b(?:class|interface|struct|enum|event)\b)(?=\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\s+)*)\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)?)\s+(?[_$[:alpha:]][_$[:alnum:]]*)\s*(?:\{|=>|$))` * Break down: * Don't match other declarations! 
`(?!.*\b(?:class|interface|struct|enum|event)\b)` * Storage modifiers: `\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\s+)*)` - * Type name: `\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?)` + * Type name: `\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)?)` * Property name: `\s+(?[_$[:alpha:]][_$[:alnum:]]*)` * End: `\s*(?:\{|=>|$))` #### Indexer declarations -* Expression: `(?=\b(?(?:(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\s+)*)\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?)\s+(?this)\s*(?:\[))` +* Expression: `(?=\b(?(?:(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\s+)*)\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)?)\s+(?this)\s*(?:\[))` * Break down: * Storage modifiers: `\b(?(?:(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\s+)*)` - * Type name: `\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?)` + * Type name: `\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)?)` * Property name: `\s+(?this)` + * End: `\s*(?:\[))` + +#### Method declarations + +* Expression: 
`\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\s+)*)\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)?)\s+(?[_$[:alpha:]][_$[:alnum:]]*)(?:\s*<\s*\g(?:\s*,\s*\g)*\s*>\s*)?\s*(?:\[))` +* Break down: + * Storage modifiers: `\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\s+)*)` + * Type name: `\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)?)` + * Method name and type parameters: `\s+(?[_$[:alpha:]][_$[:alnum:]]*)(?:\s*<\s*\g(?:\s*,\s*\g)*\s*>\s*)?` * End: `\s*(?:\[))` \ No newline at end of file From ed44f412b0c5747ee0122928c3c13e230590b251 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Wed, 28 Dec 2016 05:24:29 -0800 Subject: [PATCH 035/192] Add method declarations --- syntaxes/csharp2.json | 243 ++++++++++++--------------- syntaxes/syntax.md | 4 +- test/syntaxes/methods.test.syntax.ts | 70 ++++++++ test/syntaxes/utils/tokenizer.ts | 3 + 4 files changed, 186 insertions(+), 134 deletions(-) create mode 100644 test/syntaxes/methods.test.syntax.ts diff --git a/syntaxes/csharp2.json b/syntaxes/csharp2.json index c06c4032c3..ce09f41266 100644 --- a/syntaxes/csharp2.json +++ b/syntaxes/csharp2.json @@ -84,6 +84,9 @@ { "include": "#field-declaration" }, + { + "include": "#method-declaration" + }, { "include": "#punctuation-semicolon" } @@ -109,6 +112,9 @@ { "include": "#variable-initializer" }, + { + "include": "#method-declaration" + }, { "include": "#punctuation-semicolon" } @@ -829,104 +835,6 @@ } ] }, - "property-accessors": { - "begin": "\\{", - "beginCaptures": { - "0": { - "name": "punctuation.curlybrace.open.cs" - } - }, - "end": "\\}", - "endCaptures": { - "0": { - "name": "punctuation.curlybrace.close.cs" - } - }, - "patterns": [ 
- { - "match": "\\b((?:(?:private|protected|internal)\\s+)*)\\b(get|set)\\s*(;)", - "captures": { - "1": { - "patterns": [ - { - "match": "\\b(private|protected|internal)\\b", - "captures": { - "1": { - "name": "storage.modifier.cs" - } - } - } - ] - }, - "2": { - "patterns": [ - { - "match": "get", - "name": "keyword.other.get.cs" - }, - { - "match": "set", - "name": "keyword.other.set.cs" - } - ] - }, - "3": { - "patterns": [ - { - "include": "#statements" - }, - { - "include": "#punctuation-semicolon" - } - ] - } - } - }, - { - "begin": "\\b((?:(?:private|protected|internal)\\s+)*)\\b(get|set)\\b\\s*(\\{)", - "beginCaptures": { - "1": { - "patterns": [ - { - "match": "\\b(private|protected|internal)\\b", - "captures": { - "1": { - "name": "storage.modifier.cs" - } - } - } - ] - }, - "2": { - "patterns": [ - { - "match": "get", - "name": "keyword.other.get.cs" - }, - { - "match": "set", - "name": "keyword.other.set.cs" - } - ] - }, - "3": { - "name": "punctuation.curlybrace.open.cs" - } - }, - "end": "\\}", - "endCaptures": { - "0": { - "name": "punctuation.curlybrace.close.cs" - } - }, - "patterns": [ - { - "include": "#punctuation-semicolon" - } - ] - } - ] - }, "indexer-declaration": { "begin": "(?=\\b(?(?:(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\\s+)*)\\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)?)\\s+(?this)\\s*(?:\\[))", "end": "(?=\\}|;)", @@ -1034,6 +942,40 @@ } ] }, + "property-accessors": { + "begin": "\\{", + "beginCaptures": { + "0": { + "name": "punctuation.curlybrace.open.cs" + } + }, + "end": "\\}", + "endCaptures": { + "0": { + "name": "punctuation.curlybrace.close.cs" + } + }, + "patterns": [ + { + "match": "\\b(private|protected|internal)\\b", + "name": "storage.modifier.cs" + }, + { + "match": "\\b(get)\\b", + "name": "keyword.other.get.cs" + }, + { + "match": "\\b(set)\\b", + 
"name": "keyword.other.set.cs" + }, + { + "include": "#block" + }, + { + "include": "#punctuation-semicolon" + } + ] + }, "event-accessors": { "begin": "\\{", "beginCaptures": { @@ -1049,65 +991,100 @@ }, "patterns": [ { - "match": "\\b(add|remove)\\s*(;)", + "match": "\\b(add)\\b", + "name": "keyword.other.add.cs" + }, + { + "match": "\\b(remove)\\b", + "name": "keyword.other.remove.cs" + }, + { + "include": "#block" + }, + { + "include": "#punctuation-semicolon" + } + ] + }, + "method-declaration": { + "begin": "(?=\\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\\s+)*)\\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)(?:\\s*<\\s*\\g(?:\\s*,\\s*\\g)*\\s*>\\s*)?\\s*(?:\\())", + "end": "(?=\\}|;)", + "patterns": [ + { + "include": "#comment" + }, + { + "match": "\\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\\s+)*)\\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)(?:\\s*<\\s*\\g(?:\\s*,\\s*\\g)*\\s*>\\s*)?\\s*(?=\\()", "captures": { "1": { "patterns": [ { - "match": "add", - "name": "keyword.other.add.cs" - }, - { - "match": "remove", - "name": "keyword.other.remove.cs" + "match": "\\b(new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\\b", + "captures": { + "1": { + "name": "storage.modifier.cs" + } + } } ] }, "2": { "patterns": [ { - "include": "#punctuation-semicolon" + "include": "#type" } ] + }, + "3": { + "name": "entity.name.function.cs" } } }, { - "begin": "\\b(add|remove)\\b\\s*(\\{)", - "beginCaptures": { - "1": { - "patterns": [ - { - "match": "add", - "name": 
"keyword.other.add.cs" - }, - { - "match": "remove", - "name": "keyword.other.remove.cs" - } - ] - }, - "2": { - "name": "punctuation.curlybrace.open.cs" - } - }, - "end": "\\}", - "endCaptures": { - "0": { - "name": "punctuation.curlybrace.close.cs" - } - }, + "begin": "(?=\\()", + "end": "(?=\\))", "patterns": [ { - "include": "#statements" - }, + "include": "#parenthesized-parameter-list" + } + ] + }, + { + "include": "#expression-body" + }, + { + "begin": "(?=\\{)", + "end": "(?=\\})", + "patterns": [ { - "include": "#punctuation-semicolon" + "include": "#block" } ] } ] }, + "block": { + "begin": "\\{", + "beginCaptures": { + "0": { + "name": "punctuation.curlybrace.open.cs" + } + }, + "end": "\\}", + "endCaptures": { + "0": { + "name": "punctuation.curlybrace.close.cs" + } + }, + "patterns": [ + { + "include": "#statements" + }, + { + "include": "#punctuation-semicolon" + } + ] + }, "variable-initializer": { "begin": "(?)", "beginCaptures": { diff --git a/syntaxes/syntax.md b/syntaxes/syntax.md index efa821c121..f596465277 100644 --- a/syntaxes/syntax.md +++ b/syntaxes/syntax.md @@ -9,6 +9,8 @@ * Conversion operator declarations * Interface members +* Lots of refinement and tests to ensure proper highlighting while typing + ## Important regular expressions: #### Identifier @@ -92,7 +94,7 @@ if you consider that regular expressions don't know that "class" is a keyword. 
T #### Method declarations -* Expression: `\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\s+)*)\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)?)\s+(?[_$[:alpha:]][_$[:alnum:]]*)(?:\s*<\s*\g(?:\s*,\s*\g)*\s*>\s*)?\s*(?:\[))` +* Expression: `(?=\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\s+)*)\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)?)\s+(?[_$[:alpha:]][_$[:alnum:]]*)(?:\s*<\s*\g(?:\s*,\s*\g)*\s*>\s*)?\s*(?:\())` * Break down: * Storage modifiers: `\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\s+)*)` * Type name: `\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)?)` diff --git a/test/syntaxes/methods.test.syntax.ts b/test/syntaxes/methods.test.syntax.ts new file mode 100644 index 0000000000..49283b367d --- /dev/null +++ b/test/syntaxes/methods.test.syntax.ts @@ -0,0 +1,70 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. 
+ *--------------------------------------------------------------------------------------------*/ + +import { should } from 'chai'; +import { tokenize, Tokens } from './utils/tokenizer'; + +describe("Grammar", () => { + before(() => should()); + + describe("Methods", () => { + it("single-line declaration with no parameters", () => { + + const input = ` +class Tester +{ + void Foo() { } +}`; + let tokens = tokenize(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Class(2, 1), + Tokens.Identifiers.ClassName("Tester", 2, 7), + Tokens.Puncuation.CurlyBrace.Open(3, 1), + + Tokens.Type("void", 4, 5), + Tokens.Identifiers.MethodName("Foo", 4, 10), + Tokens.Puncuation.Parenthesis.Open(4, 13), + Tokens.Puncuation.Parenthesis.Close(4, 14), + Tokens.Puncuation.CurlyBrace.Open(4, 16), + Tokens.Puncuation.CurlyBrace.Close(4, 18), + + Tokens.Puncuation.CurlyBrace.Close(5, 1)]); + }); + + it("declaration with two parameters", () => { + + const input = ` +class Tester +{ + int Add(int x, int y) + { + return x + y; + } +}`; + let tokens = tokenize(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Class(2, 1), + Tokens.Identifiers.ClassName("Tester", 2, 7), + Tokens.Puncuation.CurlyBrace.Open(3, 1), + + Tokens.Type("int", 4, 5), + Tokens.Identifiers.MethodName("Add", 4, 9), + Tokens.Puncuation.Parenthesis.Open(4, 12), + Tokens.Type("int", 4, 13), + Tokens.Variables.Parameter("x", 4, 17), + Tokens.Puncuation.Comma(4, 18), + Tokens.Type("int", 4, 20), + Tokens.Variables.Parameter("y", 4, 24), + Tokens.Puncuation.Parenthesis.Close(4, 25), + Tokens.Puncuation.CurlyBrace.Open(5, 5), + Tokens.Puncuation.Semicolon(6, 21), + Tokens.Puncuation.CurlyBrace.Close(7, 5), + + Tokens.Puncuation.CurlyBrace.Close(8, 1)]); + }); + }); +}); \ No newline at end of file diff --git a/test/syntaxes/utils/tokenizer.ts b/test/syntaxes/utils/tokenizer.ts index 70b334e2ca..19273f227c 100644 --- a/test/syntaxes/utils/tokenizer.ts +++ b/test/syntaxes/utils/tokenizer.ts @@ -104,6 +104,9 @@ export 
namespace Tokens { export const InterfaceName = (text: string, line?: number, column?: number) => createToken(text, 'entity.name.type.interface.cs', line, column); + export const MethodName = (text: string, line?: number, column?: number) => + createToken(text, 'entity.name.function.cs', line, column); + export const NamespaceName = (text: string, line?: number, column?: number) => createToken(text, 'entity.name.type.namespace.cs', line, column); From 04f8754fcfb4b7b2f4ef258e62a15a15f06baeab Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Wed, 28 Dec 2016 10:01:51 -0800 Subject: [PATCH 036/192] Add return statement and a handful of operators --- syntaxes/csharp2.json | 53 +++++++++++++++++++------ test/syntaxes/indexers.test.syntax.ts | 3 ++ test/syntaxes/methods.test.syntax.ts | 36 +++++++++++++++++ test/syntaxes/properties.test.syntax.ts | 4 ++ test/syntaxes/utils/tokenizer.ts | 22 +++++++++- 5 files changed, 105 insertions(+), 13 deletions(-) diff --git a/syntaxes/csharp2.json b/syntaxes/csharp2.json index ce09f41266..2458287a18 100644 --- a/syntaxes/csharp2.json +++ b/syntaxes/csharp2.json @@ -120,14 +120,15 @@ } ] }, - "statements": { - "patterns": [] - }, - "expressions": { + "statement": { "patterns": [ { - "include": "#object-creation-expression" - }, + "include": "#return-statement" + } + ] + }, + "expression": { + "patterns": [ { "include": "#interpolated-string" }, @@ -137,6 +138,12 @@ { "include": "#literal" }, + { + "include": "#expression-operators" + }, + { + "include": "#object-creation-expression" + }, { "include": "#identifier" } @@ -281,7 +288,7 @@ "include": "#attribute-named-argument" }, { - "include": "#expressions" + "include": "#expression" }, { "include": "#punctuation-comma" @@ -301,7 +308,7 @@ "include": "#operator-assignment" }, { - "include": "#expressions" + "include": "#expression" } ] }, @@ -1078,7 +1085,7 @@ }, "patterns": [ { - "include": "#statements" + "include": "#statement" }, { "include": "#punctuation-semicolon" @@ 
-1095,7 +1102,7 @@ "end": "(?=[,\\);}])", "patterns": [ { - "include": "#expressions" + "include": "#expression" } ] }, @@ -1109,7 +1116,21 @@ "end": "(?=[,\\);}])", "patterns": [ { - "include": "#expressions" + "include": "#expression" + } + ] + }, + "return-statement": { + "begin": "\\b(return)\\b", + "beginCaptures": { + "1": { + "name": "keyword.other.return.cs" + } + }, + "end": "(?=;)", + "patterns": [ + { + "include": "#expression" } ] }, @@ -1175,7 +1196,7 @@ }, "patterns": [ { - "include": "#expressions" + "include": "#expression" } ] }, @@ -1272,6 +1293,14 @@ "name": "constant.character.escape.cs", "match": "\\\\." }, + "expression-operators": { + "patterns": [ + { + "name": "keyword.operator.arithmetic.cs", + "match": "%|\\*|/|-|\\+" + } + ] + }, "identifier": { "name": "variable.other.readwrite.cs", "match": "[_$[:alpha:]][_$[:alnum:]]*" diff --git a/test/syntaxes/indexers.test.syntax.ts b/test/syntaxes/indexers.test.syntax.ts index 2c1c60f4b3..5bd31d9237 100644 --- a/test/syntaxes/indexers.test.syntax.ts +++ b/test/syntaxes/indexers.test.syntax.ts @@ -37,6 +37,9 @@ class Tester Tokens.Puncuation.CurlyBrace.Open(5, 5), Tokens.Keywords.Get(6, 9), Tokens.Puncuation.CurlyBrace.Open(6, 13), + Tokens.Keywords.Return(6, 15), + Tokens.Variables.ReadWrite("index", 6, 22), + Tokens.Variables.ReadWrite("ToString", 6, 28), Tokens.Puncuation.Semicolon(6, 38), Tokens.Puncuation.CurlyBrace.Close(6, 40), Tokens.Puncuation.CurlyBrace.Close(7, 5), diff --git a/test/syntaxes/methods.test.syntax.ts b/test/syntaxes/methods.test.syntax.ts index 49283b367d..200f23afc5 100644 --- a/test/syntaxes/methods.test.syntax.ts +++ b/test/syntaxes/methods.test.syntax.ts @@ -61,10 +61,46 @@ class Tester Tokens.Variables.Parameter("y", 4, 24), Tokens.Puncuation.Parenthesis.Close(4, 25), Tokens.Puncuation.CurlyBrace.Open(5, 5), + Tokens.Keywords.Return(6, 9), + Tokens.Variables.ReadWrite("x", 6, 16), + Tokens.Operators.Arithmetic.Addition(6, 18), + Tokens.Variables.ReadWrite("y", 6, 20), 
Tokens.Puncuation.Semicolon(6, 21), Tokens.Puncuation.CurlyBrace.Close(7, 5), Tokens.Puncuation.CurlyBrace.Close(8, 1)]); }); + + it("expression body", () => { + + const input = ` +class Tester +{ + int Add(int x, int y) => x + y; +}`; + let tokens = tokenize(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Class(2, 1), + Tokens.Identifiers.ClassName("Tester", 2, 7), + Tokens.Puncuation.CurlyBrace.Open(3, 1), + + Tokens.Type("int", 4, 5), + Tokens.Identifiers.MethodName("Add", 4, 9), + Tokens.Puncuation.Parenthesis.Open(4, 12), + Tokens.Type("int", 4, 13), + Tokens.Variables.Parameter("x", 4, 17), + Tokens.Puncuation.Comma(4, 18), + Tokens.Type("int", 4, 20), + Tokens.Variables.Parameter("y", 4, 24), + Tokens.Puncuation.Parenthesis.Close(4, 25), + Tokens.Operators.Arrow(4, 27), + Tokens.Variables.ReadWrite("x", 4, 30), + Tokens.Operators.Arithmetic.Addition(4, 32), + Tokens.Variables.ReadWrite("y", 4, 34), + Tokens.Puncuation.Semicolon(4, 35), + + Tokens.Puncuation.CurlyBrace.Close(5, 1)]); + }); }); }); \ No newline at end of file diff --git a/test/syntaxes/properties.test.syntax.ts b/test/syntaxes/properties.test.syntax.ts index 6fd6dda173..0e5eeda52a 100644 --- a/test/syntaxes/properties.test.syntax.ts +++ b/test/syntaxes/properties.test.syntax.ts @@ -34,6 +34,8 @@ class Tester Tokens.Puncuation.CurlyBrace.Open(5, 5), Tokens.Keywords.Get(6, 9), Tokens.Puncuation.CurlyBrace.Open(6, 13), + Tokens.Keywords.Return(6, 15), + Tokens.Literals.Null(6, 22), Tokens.Puncuation.Semicolon(6, 26), Tokens.Puncuation.CurlyBrace.Close(6, 28), Tokens.Keywords.Set(7, 9), @@ -65,6 +67,8 @@ class Tester Tokens.Puncuation.CurlyBrace.Open(4, 28), Tokens.Keywords.Get(4, 30), Tokens.Puncuation.CurlyBrace.Open(4, 34), + Tokens.Keywords.Return(4, 36), + Tokens.Literals.Null(4, 43), Tokens.Puncuation.Semicolon(4, 47), Tokens.Puncuation.CurlyBrace.Close(4, 49), Tokens.Keywords.Modifiers.Private(4, 51), diff --git a/test/syntaxes/utils/tokenizer.ts b/test/syntaxes/utils/tokenizer.ts 
index 19273f227c..8ec277499a 100644 --- a/test/syntaxes/utils/tokenizer.ts +++ b/test/syntaxes/utils/tokenizer.ts @@ -201,6 +201,9 @@ export namespace Tokens { export const Remove = (line?: number, column?: number) => createToken('remove', 'keyword.other.remove.cs', line, column); + export const Return = (line?: number, column?: number) => + createToken('return', 'keyword.other.return.cs', line, column); + export const Set = (line?: number, column?: number) => createToken('set', 'keyword.other.set.cs', line, column); @@ -251,9 +254,26 @@ export namespace Tokens { export const Arrow = (line?: number, column?: number) => createToken('=>', 'keyword.operator.arrow.cs', line, column); + export namespace Arithmetic { + export const Addition = (line?: number, column?: number) => + createToken('+', 'keyword.operator.arithmetic.cs', line, column); + + export const Division = (line?: number, column?: number) => + createToken('/', 'keyword.operator.arithmetic.cs', line, column); + + export const Multiplication = (line?: number, column?: number) => + createToken('*', 'keyword.operator.arithmetic.cs', line, column); + + export const Remainder = (line?: number, column?: number) => + createToken('%', 'keyword.operator.arithmetic.cs', line, column); + + export const Subtraction = (line?: number, column?: number) => + createToken('-', 'keyword.operator.arithmetic.cs', line, column); + } + export const Assignment = (line?: number, column?: number) => createToken('=', 'keyword.operator.assignment.cs', line, column); - } + } export namespace Puncuation { export const Accessor = (line?: number, column?: number) => From 02bb1e9e29d408fdac4f7b864c33fef55257d280 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Wed, 28 Dec 2016 11:22:11 -0800 Subject: [PATCH 037/192] Add a way to tokenize specific bits of code in tests --- syntaxes/csharp2.json | 30 ++++--- syntaxes/syntax.md | 1 - .../interation-statements.test.syntax.ts | 36 ++++++++ test/syntaxes/properties.test.syntax.ts | 6 ++ 
test/syntaxes/utils/tokenizer.ts | 82 ++++++++++++++----- 5 files changed, 119 insertions(+), 36 deletions(-) create mode 100644 test/syntaxes/interation-statements.test.syntax.ts diff --git a/syntaxes/csharp2.json b/syntaxes/csharp2.json index 2458287a18..a756948f41 100644 --- a/syntaxes/csharp2.json +++ b/syntaxes/csharp2.json @@ -123,7 +123,16 @@ "statement": { "patterns": [ { - "include": "#return-statement" + "include": "#comment" + }, + { + "include": "#control-statement" + }, + { + "include": "#expression" + }, + { + "include": "#punctuation-semicolon" } ] }, @@ -1086,9 +1095,6 @@ "patterns": [ { "include": "#statement" - }, - { - "include": "#punctuation-semicolon" } ] }, @@ -1120,17 +1126,11 @@ } ] }, - "return-statement": { - "begin": "\\b(return)\\b", - "beginCaptures": { - "1": { - "name": "keyword.other.return.cs" - } - }, - "end": "(?=;)", + "control-statement": { "patterns": [ { - "include": "#expression" + "name": "keyword.control.flow.cs", + "match": "(? { + before(() => should()); + + describe.skip("Iteration statements (loops)", () => { + it("single-line declaration with no parameters", () => { + + const input = Input.InMethodBody(` +while (true) { } +`); + + let tokens = tokenize(input); + + tokens.should.deep.equal([ + Tokens.Keywords.Class(2, 1), + Tokens.Identifiers.ClassName("Tester", 2, 7), + Tokens.Puncuation.CurlyBrace.Open(3, 1), + + Tokens.Type("void", 4, 5), + Tokens.Identifiers.MethodName("Foo", 4, 10), + Tokens.Puncuation.Parenthesis.Open(4, 13), + Tokens.Puncuation.Parenthesis.Close(4, 14), + Tokens.Puncuation.CurlyBrace.Open(5, 5), + Tokens.Puncuation.CurlyBrace.Close(7, 5), + + Tokens.Puncuation.CurlyBrace.Close(8, 1)]); + }); + }); +}); \ No newline at end of file diff --git a/test/syntaxes/properties.test.syntax.ts b/test/syntaxes/properties.test.syntax.ts index 0e5eeda52a..10b65435e7 100644 --- a/test/syntaxes/properties.test.syntax.ts +++ b/test/syntaxes/properties.test.syntax.ts @@ -40,6 +40,9 @@ class Tester 
Tokens.Puncuation.CurlyBrace.Close(6, 28), Tokens.Keywords.Set(7, 9), Tokens.Puncuation.CurlyBrace.Open(7, 13), + Tokens.Variables.ReadWrite("something", 7, 15), + Tokens.Operators.Assignment(7, 25), + Tokens.Variables.ReadWrite("value", 7, 27), Tokens.Puncuation.Semicolon(7, 32), Tokens.Puncuation.CurlyBrace.Close(7, 34), Tokens.Puncuation.CurlyBrace.Close(8, 5), @@ -74,6 +77,9 @@ class Tester Tokens.Keywords.Modifiers.Private(4, 51), Tokens.Keywords.Set(4, 59), Tokens.Puncuation.CurlyBrace.Open(4, 63), + Tokens.Variables.ReadWrite("something", 4, 65), + Tokens.Operators.Assignment(4, 75), + Tokens.Variables.ReadWrite("value", 4, 77), Tokens.Puncuation.Semicolon(4, 82), Tokens.Puncuation.CurlyBrace.Close(4, 84), Tokens.Puncuation.CurlyBrace.Close(4, 86), diff --git a/test/syntaxes/utils/tokenizer.ts b/test/syntaxes/utils/tokenizer.ts index 8ec277499a..4a3dd92854 100644 --- a/test/syntaxes/utils/tokenizer.ts +++ b/test/syntaxes/utils/tokenizer.ts @@ -9,23 +9,30 @@ const registry = new Registry(); const grammar = registry.loadGrammarFromPathSync('syntaxes/csharp2.json'); const excludedTypes = ['source.cs', 'meta.interpolation.cs', 'meta.type.parameters.cs'] -export function tokenize(input: string, excludeTypes: boolean = true): Token[] { - let tokens: Token[] = []; - - // ensure consistent line-endings irrelevant of OS - input = input.replace('\r\n', '\n'); +export function tokenize(input: string | Input, excludeTypes: boolean = true): Token[] { + if (typeof input === "string") { + input = Input.FromText(input); + } + let tokens: Token[] = []; let previousStack: StackElement = null; - const lines = input.split('\n'); - - for (let lineIndex = 0; lineIndex < lines.length; lineIndex++) { - const line = lines[lineIndex]; + for (let lineIndex = 0; lineIndex < input.lines.length; lineIndex++) { + const line = input.lines[lineIndex]; let lineResult = grammar.tokenizeLine(line, previousStack); previousStack = lineResult.ruleStack; + if (lineIndex < input.span.startLine || 
lineIndex > input.span.endLine) { + continue; + } + for (const token of lineResult.tokens) { + if ((lineIndex === input.span.startLine && token.startIndex < input.span.startIndex) || + (lineIndex === input.span.endLine && token.endIndex > input.span.endIndex)) { + continue; + } + const text = line.substring(token.startIndex, token.endIndex); const type = token.scopes[token.scopes.length - 1]; @@ -38,18 +45,49 @@ export function tokenize(input: string, excludeTypes: boolean = true): Token[] { return tokens; } -export class Token { - constructor(text: string, type: string, line?: number, column?: number) { - this.text = text; - this.type = type; - this.column = column; - this.line = line; +export class Span { + constructor( + public startLine: number, + public startIndex: number, + public endLine: number, + public endIndex: number) { } +} + +export class Input { + private constructor( + public lines: string[], + public span: Span) { } + + public static FromText(text: string) { + // ensure consistent line-endings irrelevant of OS + text = text.replace('\r\n', '\n'); + let lines = text.split('\n'); + + return new Input(lines, new Span(0, 0, lines.length - 1, lines[lines.length - 1].length)); + } + + public static InMethodBody(input: string) { + let text = ` +class Tester { + void M() { + ${input} } +}`; + + // ensure consistent line-endings irrelevant of OS + text = text.replace('\r\n', '\n'); + let lines = text.split('\n'); - public text: string; - public type: string; - public line: number; - public column: number; + return new Input(lines, new Span(3, 8, lines.length - 1, 0)); + } +} + +export class Token { + constructor( + public text: string, + public type: string, + public line?: number, + public column?: number) { } } export namespace Tokens { @@ -202,7 +240,7 @@ export namespace Tokens { createToken('remove', 'keyword.other.remove.cs', line, column); export const Return = (line?: number, column?: number) => - createToken('return', 'keyword.other.return.cs', 
line, column); + createToken('return', 'keyword.control.flow.cs', line, column); export const Set = (line?: number, column?: number) => createToken('set', 'keyword.other.set.cs', line, column); @@ -270,10 +308,10 @@ export namespace Tokens { export const Subtraction = (line?: number, column?: number) => createToken('-', 'keyword.operator.arithmetic.cs', line, column); } - + export const Assignment = (line?: number, column?: number) => createToken('=', 'keyword.operator.assignment.cs', line, column); - } + } export namespace Puncuation { export const Accessor = (line?: number, column?: number) => From 541464f59fdd51cc4f135615ee144a4951214a51 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Wed, 28 Dec 2016 11:50:22 -0800 Subject: [PATCH 038/192] Update a couple tests --- test/syntaxes/indexers.test.syntax.ts | 20 ++++++------------- .../interation-statements.test.syntax.ts | 2 +- test/syntaxes/utils/tokenizer.ts | 17 ++++++++++++++-- 3 files changed, 22 insertions(+), 17 deletions(-) diff --git a/test/syntaxes/indexers.test.syntax.ts b/test/syntaxes/indexers.test.syntax.ts index 5bd31d9237..59ac9ba465 100644 --- a/test/syntaxes/indexers.test.syntax.ts +++ b/test/syntaxes/indexers.test.syntax.ts @@ -4,7 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { should } from 'chai'; -import { tokenize, Tokens } from './utils/tokenizer'; +import { tokenize, Input, Tokens } from './utils/tokenizer'; describe("Grammar", () => { before(() => should()); @@ -12,21 +12,15 @@ describe("Grammar", () => { describe("Indexers", () => { it("declaration", () => { - const input = ` -class Tester -{ + const input = Input.InClass(` public string this[int index] { get { return index.ToString(); } - } -}`; - let tokens = tokenize(input); + }`); - tokens.should.deep.equal([ - Tokens.Keywords.Class(2, 1), - Tokens.Identifiers.ClassName("Tester", 2, 7), - Tokens.Puncuation.CurlyBrace.Open(3, 1), + const tokens = tokenize(input); + 
tokens.should.deep.equal([ Tokens.Keywords.Modifiers.Public(4, 5), Tokens.Type("string", 4, 12), Tokens.Keywords.This(4, 19), @@ -42,9 +36,7 @@ class Tester Tokens.Variables.ReadWrite("ToString", 6, 28), Tokens.Puncuation.Semicolon(6, 38), Tokens.Puncuation.CurlyBrace.Close(6, 40), - Tokens.Puncuation.CurlyBrace.Close(7, 5), - - Tokens.Puncuation.CurlyBrace.Close(8, 1)]); + Tokens.Puncuation.CurlyBrace.Close(7, 5)]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/interation-statements.test.syntax.ts b/test/syntaxes/interation-statements.test.syntax.ts index 39754b7585..090efd4b9d 100644 --- a/test/syntaxes/interation-statements.test.syntax.ts +++ b/test/syntaxes/interation-statements.test.syntax.ts @@ -12,7 +12,7 @@ describe("Grammar", () => { describe.skip("Iteration statements (loops)", () => { it("single-line declaration with no parameters", () => { - const input = Input.InMethodBody(` + const input = Input.InMethod(` while (true) { } `); diff --git a/test/syntaxes/utils/tokenizer.ts b/test/syntaxes/utils/tokenizer.ts index 4a3dd92854..7905c7adef 100644 --- a/test/syntaxes/utils/tokenizer.ts +++ b/test/syntaxes/utils/tokenizer.ts @@ -66,7 +66,20 @@ export class Input { return new Input(lines, new Span(0, 0, lines.length - 1, lines[lines.length - 1].length)); } - public static InMethodBody(input: string) { + public static InClass(input: string) { + let text = ` +class Tester { + ${input} +}`; + + // ensure consistent line-endings irrelevant of OS + text = text.replace('\r\n', '\n'); + let lines = text.split('\n'); + + return new Input(lines, new Span(2, 4, lines.length - 1, 0)); + } + + public static InMethod(input: string) { let text = ` class Tester { void M() { @@ -78,7 +91,7 @@ class Tester { text = text.replace('\r\n', '\n'); let lines = text.split('\n'); - return new Input(lines, new Span(3, 8, lines.length - 1, 0)); + return new Input(lines, new Span(3, 8, lines.length - 2, 0)); } } From 98a1240ed2f605c9e97c9603a6282474d0f6016c Mon Sep 
17 00:00:00 2001 From: Dustin Campbell Date: Wed, 28 Dec 2016 12:53:45 -0800 Subject: [PATCH 039/192] Stop testing token line/column This adds little value since we're checking each token in the stream and reduces productivity when creating tests. --- test/syntaxes/attributes.test.syntax.ts | 176 ++++---- test/syntaxes/boolean-literals.test.syntax.ts | 38 +- test/syntaxes/classes.test.syntax.ts | 376 ++++++++--------- test/syntaxes/comments.test.syntax.ts | 16 +- test/syntaxes/delegates.test.syntax.ts | 86 ++-- test/syntaxes/enums.test.syntax.ts | 70 ++-- test/syntaxes/events.test.syntax.ts | 166 ++++---- test/syntaxes/extern-aliases.test.syntax.ts | 16 +- test/syntaxes/fields.test.syntax.ts | 250 +++++------ test/syntaxes/indexers.test.syntax.ts | 40 +- .../interation-statements.test.syntax.ts | 24 +- test/syntaxes/interfaces.test.syntax.ts | 60 +-- .../interpolated-strings.test.syntax.ts | 320 +++++++------- test/syntaxes/methods.test.syntax.ts | 108 ++--- test/syntaxes/namespaces.test.syntax.ts | 128 +++--- test/syntaxes/numeric-literals.test.syntax.ts | 72 ++-- test/syntaxes/properties.test.syntax.ts | 390 +++++++++--------- test/syntaxes/structs.test.syntax.ts | 52 +-- test/syntaxes/using-directives.test.syntax.ts | 202 ++++----- test/syntaxes/utils/tokenizer.ts | 344 +++++---------- 20 files changed, 1389 insertions(+), 1545 deletions(-) diff --git a/test/syntaxes/attributes.test.syntax.ts b/test/syntaxes/attributes.test.syntax.ts index e9c96cfb11..a11ce8e751 100644 --- a/test/syntaxes/attributes.test.syntax.ts +++ b/test/syntaxes/attributes.test.syntax.ts @@ -18,9 +18,9 @@ describe("Grammar", () => { let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Puncuation.SquareBracket.Open(2, 1), - Tokens.Type("Foo", 2, 2), - Tokens.Puncuation.SquareBracket.Close(2, 5)]); + Tokens.Puncuation.SquareBracket.Open, + Tokens.Type("Foo"), + Tokens.Puncuation.SquareBracket.Close]); }); it("global attribute with specifier", () => { @@ -31,11 +31,11 @@ 
describe("Grammar", () => { let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Puncuation.SquareBracket.Open(2, 1), - Tokens.Keywords.AttributeSpecifier("assembly", 2, 2), - Tokens.Puncuation.Colon(2, 10), - Tokens.Type("Foo", 2, 12), - Tokens.Puncuation.SquareBracket.Close(2, 15)]); + Tokens.Puncuation.SquareBracket.Open, + Tokens.Keywords.AttributeSpecifier("assembly"), + Tokens.Puncuation.Colon, + Tokens.Type("Foo"), + Tokens.Puncuation.SquareBracket.Close]); }); it("Two global attributes in same section with specifier", () => { @@ -46,13 +46,13 @@ describe("Grammar", () => { let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Puncuation.SquareBracket.Open(2, 1), - Tokens.Keywords.AttributeSpecifier("module", 2, 2), - Tokens.Puncuation.Colon(2, 8), - Tokens.Type("Foo", 2, 10), - Tokens.Puncuation.Comma(2, 13), - Tokens.Type("Bar", 2, 15), - Tokens.Puncuation.SquareBracket.Close(2, 18)]); + Tokens.Puncuation.SquareBracket.Open, + Tokens.Keywords.AttributeSpecifier("module"), + Tokens.Puncuation.Colon, + Tokens.Type("Foo"), + Tokens.Puncuation.Comma, + Tokens.Type("Bar"), + Tokens.Puncuation.SquareBracket.Close]); }); it("Two global attributes in same section with specifier and empty argument lists", () => { @@ -63,17 +63,17 @@ describe("Grammar", () => { let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Puncuation.SquareBracket.Open(2, 1), - Tokens.Keywords.AttributeSpecifier("module", 2, 2), - Tokens.Puncuation.Colon(2, 8), - Tokens.Type("Foo", 2, 10), - Tokens.Puncuation.Parenthesis.Open(2, 13), - Tokens.Puncuation.Parenthesis.Close(2, 14), - Tokens.Puncuation.Comma(2, 15), - Tokens.Type("Bar", 2, 17), - Tokens.Puncuation.Parenthesis.Open(2, 20), - Tokens.Puncuation.Parenthesis.Close(2, 21), - Tokens.Puncuation.SquareBracket.Close(2, 22)]); + Tokens.Puncuation.SquareBracket.Open, + Tokens.Keywords.AttributeSpecifier("module"), + Tokens.Puncuation.Colon, + Tokens.Type("Foo"), + Tokens.Puncuation.Parenthesis.Open, + 
Tokens.Puncuation.Parenthesis.Close, + Tokens.Puncuation.Comma, + Tokens.Type("Bar"), + Tokens.Puncuation.Parenthesis.Open, + Tokens.Puncuation.Parenthesis.Close, + Tokens.Puncuation.SquareBracket.Close]); }); it("Global attribute with one argument", () => { @@ -84,12 +84,12 @@ describe("Grammar", () => { let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Puncuation.SquareBracket.Open(2, 1), - Tokens.Type("Foo", 2, 2), - Tokens.Puncuation.Parenthesis.Open(2, 5), - Tokens.Literals.Boolean.True(2, 6), - Tokens.Puncuation.Parenthesis.Close(2, 10), - Tokens.Puncuation.SquareBracket.Close(2, 11)]); + Tokens.Puncuation.SquareBracket.Open, + Tokens.Type("Foo"), + Tokens.Puncuation.Parenthesis.Open, + Tokens.Literals.Boolean.True, + Tokens.Puncuation.Parenthesis.Close, + Tokens.Puncuation.SquareBracket.Close]); }); it("Global attribute with two arguments", () => { @@ -100,14 +100,14 @@ describe("Grammar", () => { let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Puncuation.SquareBracket.Open(2, 1), - Tokens.Type("Foo", 2, 2), - Tokens.Puncuation.Parenthesis.Open(2, 5), - Tokens.Literals.Boolean.True(2, 6), - Tokens.Puncuation.Comma(2, 10), - Tokens.Literals.Numeric.Decimal("42", 2, 12), - Tokens.Puncuation.Parenthesis.Close(2, 14), - Tokens.Puncuation.SquareBracket.Close(2, 15)]); + Tokens.Puncuation.SquareBracket.Open, + Tokens.Type("Foo"), + Tokens.Puncuation.Parenthesis.Open, + Tokens.Literals.Boolean.True, + Tokens.Puncuation.Comma, + Tokens.Literals.Numeric.Decimal("42"), + Tokens.Puncuation.Parenthesis.Close, + Tokens.Puncuation.SquareBracket.Close]); }); it("Global attribute with three arguments", () => { @@ -118,18 +118,18 @@ describe("Grammar", () => { let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Puncuation.SquareBracket.Open(2, 1), - Tokens.Type("Foo", 2, 2), - Tokens.Puncuation.Parenthesis.Open(2, 5), - Tokens.Literals.Boolean.True(2, 6), - Tokens.Puncuation.Comma(2, 10), - Tokens.Literals.Numeric.Decimal("42", 
2, 12), - Tokens.Puncuation.Comma(2, 14), - Tokens.Puncuation.String.Begin(2, 16), - Tokens.Literals.String("text", 2, 17), - Tokens.Puncuation.String.End(2, 21), - Tokens.Puncuation.Parenthesis.Close(2, 22), - Tokens.Puncuation.SquareBracket.Close(2, 23)]); + Tokens.Puncuation.SquareBracket.Open, + Tokens.Type("Foo"), + Tokens.Puncuation.Parenthesis.Open, + Tokens.Literals.Boolean.True, + Tokens.Puncuation.Comma, + Tokens.Literals.Numeric.Decimal("42"), + Tokens.Puncuation.Comma, + Tokens.Puncuation.String.Begin, + Tokens.Literals.String("text"), + Tokens.Puncuation.String.End, + Tokens.Puncuation.Parenthesis.Close, + Tokens.Puncuation.SquareBracket.Close]); }); it("Global attribute with named argument", () => { @@ -140,14 +140,14 @@ describe("Grammar", () => { let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Puncuation.SquareBracket.Open(2, 1), - Tokens.Type("Foo", 2, 2), - Tokens.Puncuation.Parenthesis.Open(2, 5), - Tokens.Identifiers.PropertyName("Bar", 2, 6), - Tokens.Operators.Assignment(2, 10), - Tokens.Literals.Numeric.Decimal("42", 2, 12), - Tokens.Puncuation.Parenthesis.Close(2, 14), - Tokens.Puncuation.SquareBracket.Close(2, 15)]); + Tokens.Puncuation.SquareBracket.Open, + Tokens.Type("Foo"), + Tokens.Puncuation.Parenthesis.Open, + Tokens.Identifiers.PropertyName("Bar"), + Tokens.Operators.Assignment, + Tokens.Literals.Numeric.Decimal("42"), + Tokens.Puncuation.Parenthesis.Close, + Tokens.Puncuation.SquareBracket.Close]); }); it("Global attribute with one positional argument and one named argument", () => { @@ -158,16 +158,16 @@ describe("Grammar", () => { let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Puncuation.SquareBracket.Open(2, 1), - Tokens.Type("Foo", 2, 2), - Tokens.Puncuation.Parenthesis.Open(2, 5), - Tokens.Literals.Boolean.True(2, 6), - Tokens.Puncuation.Comma(2, 10), - Tokens.Identifiers.PropertyName("Bar", 2, 12), - Tokens.Operators.Assignment(2, 16), - Tokens.Literals.Numeric.Decimal("42", 2, 18), - 
Tokens.Puncuation.Parenthesis.Close(2, 20), - Tokens.Puncuation.SquareBracket.Close(2, 21)]); + Tokens.Puncuation.SquareBracket.Open, + Tokens.Type("Foo"), + Tokens.Puncuation.Parenthesis.Open, + Tokens.Literals.Boolean.True, + Tokens.Puncuation.Comma, + Tokens.Identifiers.PropertyName("Bar"), + Tokens.Operators.Assignment, + Tokens.Literals.Numeric.Decimal("42"), + Tokens.Puncuation.Parenthesis.Close, + Tokens.Puncuation.SquareBracket.Close]); }); it("Global attribute with specifier, one positional argument, and two named arguments", () => { @@ -178,24 +178,24 @@ describe("Grammar", () => { let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Puncuation.SquareBracket.Open(2, 1), - Tokens.Keywords.AttributeSpecifier("module", 2, 2), - Tokens.Puncuation.Colon(2, 8), - Tokens.Type("Foo", 2, 10), - Tokens.Puncuation.Parenthesis.Open(2, 13), - Tokens.Literals.Boolean.True(2, 14), - Tokens.Puncuation.Comma(2, 18), - Tokens.Identifiers.PropertyName("Bar", 2, 20), - Tokens.Operators.Assignment(2, 24), - Tokens.Literals.Numeric.Decimal("42", 2, 26), - Tokens.Puncuation.Comma(2, 28), - Tokens.Identifiers.PropertyName("Baz", 2, 30), - Tokens.Operators.Assignment(2, 34), - Tokens.Puncuation.String.Begin(2, 36), - Tokens.Literals.String("hello", 2, 37), - Tokens.Puncuation.String.End(2, 42), - Tokens.Puncuation.Parenthesis.Close(2, 43), - Tokens.Puncuation.SquareBracket.Close(2, 44)]); + Tokens.Puncuation.SquareBracket.Open, + Tokens.Keywords.AttributeSpecifier("module"), + Tokens.Puncuation.Colon, + Tokens.Type("Foo"), + Tokens.Puncuation.Parenthesis.Open, + Tokens.Literals.Boolean.True, + Tokens.Puncuation.Comma, + Tokens.Identifiers.PropertyName("Bar"), + Tokens.Operators.Assignment, + Tokens.Literals.Numeric.Decimal("42"), + Tokens.Puncuation.Comma, + Tokens.Identifiers.PropertyName("Baz"), + Tokens.Operators.Assignment, + Tokens.Puncuation.String.Begin, + Tokens.Literals.String("hello"), + Tokens.Puncuation.String.End, + Tokens.Puncuation.Parenthesis.Close, + 
Tokens.Puncuation.SquareBracket.Close]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/boolean-literals.test.syntax.ts b/test/syntaxes/boolean-literals.test.syntax.ts index fbf1ddd09c..feb891419b 100644 --- a/test/syntaxes/boolean-literals.test.syntax.ts +++ b/test/syntaxes/boolean-literals.test.syntax.ts @@ -20,17 +20,17 @@ class C { let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Class(2, 1), - Tokens.Identifiers.ClassName("C", 2, 7), - Tokens.Puncuation.CurlyBrace.Open(2, 9), + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("C"), + Tokens.Puncuation.CurlyBrace.Open, - Tokens.Type("bool", 3, 5), - Tokens.Identifiers.FieldName("x", 3, 10), - Tokens.Operators.Assignment(3, 12), - Tokens.Literals.Boolean.True(3, 14), - Tokens.Puncuation.Semicolon(3, 18), + Tokens.Type("bool"), + Tokens.Identifiers.FieldName("x"), + Tokens.Operators.Assignment, + Tokens.Literals.Boolean.True, + Tokens.Puncuation.Semicolon, - Tokens.Puncuation.CurlyBrace.Close(4, 1)]); + Tokens.Puncuation.CurlyBrace.Close]); }); it("false", () => { @@ -43,17 +43,17 @@ class C { let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Class(2, 1), - Tokens.Identifiers.ClassName("C", 2, 7), - Tokens.Puncuation.CurlyBrace.Open(2, 9), - - Tokens.Type("bool", 3, 5), - Tokens.Identifiers.FieldName("x", 3, 10), - Tokens.Operators.Assignment(3, 12), - Tokens.Literals.Boolean.False(3, 14), - Tokens.Puncuation.Semicolon(3, 19), + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("C"), + Tokens.Puncuation.CurlyBrace.Open, + + Tokens.Type("bool"), + Tokens.Identifiers.FieldName("x"), + Tokens.Operators.Assignment, + Tokens.Literals.Boolean.False, + Tokens.Puncuation.Semicolon, - Tokens.Puncuation.CurlyBrace.Close(4, 1)]); + Tokens.Puncuation.CurlyBrace.Close]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/classes.test.syntax.ts b/test/syntaxes/classes.test.syntax.ts index fb4312f2e7..1ec8212daf 100644 --- 
a/test/syntaxes/classes.test.syntax.ts +++ b/test/syntaxes/classes.test.syntax.ts @@ -36,67 +36,67 @@ namespace TestNamespace let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Namespace(2, 1), - Tokens.Identifiers.NamespaceName("TestNamespace", 2, 11), - Tokens.Puncuation.CurlyBrace.Open(3, 1), - - Tokens.Keywords.Modifiers.Public(4, 5), - Tokens.Keywords.Class(4, 24), - Tokens.Identifiers.ClassName("PublicClass", 4, 30), - Tokens.Puncuation.CurlyBrace.Open(4, 42), - Tokens.Puncuation.CurlyBrace.Close(4, 44), - - Tokens.Keywords.Class(6, 24), - Tokens.Identifiers.ClassName("DefaultClass", 6, 30), - Tokens.Puncuation.CurlyBrace.Open(6, 43), - Tokens.Puncuation.CurlyBrace.Close(6, 45), - - Tokens.Keywords.Modifiers.Internal(8, 5), - Tokens.Keywords.Class(8, 24), - Tokens.Identifiers.ClassName("InternalClass", 8, 30), - Tokens.Puncuation.CurlyBrace.Open(8, 44), - Tokens.Puncuation.CurlyBrace.Close(8, 46), - - Tokens.Keywords.Modifiers.Static(10, 15), - Tokens.Keywords.Class(10, 24), - Tokens.Identifiers.ClassName("DefaultStaticClass", 10, 30), - Tokens.Puncuation.CurlyBrace.Open(10, 49), - Tokens.Puncuation.CurlyBrace.Close(10, 51), - - Tokens.Keywords.Modifiers.Public(12, 5), - Tokens.Keywords.Modifiers.Static(12, 15), - Tokens.Keywords.Class(12, 24), - Tokens.Identifiers.ClassName("PublicStaticClass", 12, 30), - Tokens.Puncuation.CurlyBrace.Open(12, 48), - Tokens.Puncuation.CurlyBrace.Close(12, 50), - - Tokens.Keywords.Modifiers.Sealed(14, 15), - Tokens.Keywords.Class(14, 24), - Tokens.Identifiers.ClassName("DefaultSealedClass", 14, 30), - Tokens.Puncuation.CurlyBrace.Open(14, 49), - Tokens.Puncuation.CurlyBrace.Close(14, 51), - - Tokens.Keywords.Modifiers.Public(16, 5), - Tokens.Keywords.Modifiers.Sealed(16, 15), - Tokens.Keywords.Class(16, 24), - Tokens.Identifiers.ClassName("PublicSealedClass", 16, 30), - Tokens.Puncuation.CurlyBrace.Open(16, 48), - Tokens.Puncuation.CurlyBrace.Close(16, 50), - - Tokens.Keywords.Modifiers.Public(18, 5), - 
Tokens.Keywords.Modifiers.Abstract(18, 15), - Tokens.Keywords.Class(18, 24), - Tokens.Identifiers.ClassName("PublicAbstractClass", 18, 30), - Tokens.Puncuation.CurlyBrace.Open(18, 50), - Tokens.Puncuation.CurlyBrace.Close(18, 52), - - Tokens.Keywords.Modifiers.Abstract(20, 15), - Tokens.Keywords.Class(20, 24), - Tokens.Identifiers.ClassName("DefaultAbstractClass", 20, 30), - Tokens.Puncuation.CurlyBrace.Open(20, 51), - Tokens.Puncuation.CurlyBrace.Close(20, 53), - - Tokens.Puncuation.CurlyBrace.Close(21, 1)]); + Tokens.Keywords.Namespace, + Tokens.Identifiers.NamespaceName("TestNamespace"), + Tokens.Puncuation.CurlyBrace.Open, + + Tokens.Keywords.Modifiers.Public, + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("PublicClass"), + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Puncuation.CurlyBrace.Close, + + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("DefaultClass"), + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Puncuation.CurlyBrace.Close, + + Tokens.Keywords.Modifiers.Internal, + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("InternalClass"), + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Puncuation.CurlyBrace.Close, + + Tokens.Keywords.Modifiers.Static, + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("DefaultStaticClass"), + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Puncuation.CurlyBrace.Close, + + Tokens.Keywords.Modifiers.Public, + Tokens.Keywords.Modifiers.Static, + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("PublicStaticClass"), + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Puncuation.CurlyBrace.Close, + + Tokens.Keywords.Modifiers.Sealed, + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("DefaultSealedClass"), + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Puncuation.CurlyBrace.Close, + + Tokens.Keywords.Modifiers.Public, + Tokens.Keywords.Modifiers.Sealed, + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("PublicSealedClass"), + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Puncuation.CurlyBrace.Close, + 
+ Tokens.Keywords.Modifiers.Public, + Tokens.Keywords.Modifiers.Abstract, + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("PublicAbstractClass"), + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Puncuation.CurlyBrace.Close, + + Tokens.Keywords.Modifiers.Abstract, + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("DefaultAbstractClass"), + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Puncuation.CurlyBrace.Close, + + Tokens.Puncuation.CurlyBrace.Close]); }); it("generics in identifier", () => { @@ -109,16 +109,16 @@ namespace TestNamespace let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Namespace(2, 1), - Tokens.Identifiers.NamespaceName("TestNamespace", 2, 11), - Tokens.Puncuation.CurlyBrace.Open(3, 1), + Tokens.Keywords.Namespace, + Tokens.Identifiers.NamespaceName("TestNamespace"), + Tokens.Puncuation.CurlyBrace.Open, - Tokens.Keywords.Class(4, 5), - Tokens.Identifiers.ClassName("Dictionary", 4, 11), - Tokens.Puncuation.CurlyBrace.Open(4, 36), - Tokens.Puncuation.CurlyBrace.Close(4, 38), + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("Dictionary"), + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Puncuation.CurlyBrace.Close, - Tokens.Puncuation.CurlyBrace.Close(5, 1)]); + Tokens.Puncuation.CurlyBrace.Close]); }); it("inheritance", () => { @@ -133,67 +133,67 @@ namespace TestNamespace let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Namespace(2, 1), - Tokens.Identifiers.NamespaceName("TestNamespace", 2, 11), - Tokens.Puncuation.CurlyBrace.Open(3, 1), - - Tokens.Keywords.Class(4, 5), - Tokens.Identifiers.ClassName("PublicClass", 4, 11), - Tokens.Puncuation.Colon(4, 26), - Tokens.Type("IInterface", 4, 28), - Tokens.Puncuation.Comma(4, 38), - Tokens.Type("IInterfaceTwo", 4, 43), - Tokens.Puncuation.CurlyBrace.Open(4, 57), - Tokens.Puncuation.CurlyBrace.Close(4, 59), - - Tokens.Keywords.Class(5, 5), - Tokens.Identifiers.ClassName("PublicClass", 5, 11), - Tokens.Puncuation.Colon(5, 26), - 
Tokens.Type("Root", 5, 28), - Tokens.Puncuation.Accessor(5, 32), - Tokens.Type("IInterface", 5, 33), - Tokens.Puncuation.TypeParameters.Begin(5, 43), - Tokens.Type("Something", 5, 44), - Tokens.Puncuation.Accessor(5, 53), - Tokens.Type("Nested", 5, 54), - Tokens.Puncuation.TypeParameters.End(5, 60), - Tokens.Puncuation.Comma(5, 61), - Tokens.Type("Something", 5, 63), - Tokens.Puncuation.Accessor(5, 72), - Tokens.Type("IInterfaceTwo", 5, 73), - Tokens.Puncuation.CurlyBrace.Open(5, 87), - Tokens.Puncuation.CurlyBrace.Close(5, 89), - - Tokens.Keywords.Class(6, 5), - Tokens.Identifiers.ClassName("PublicClass", 6, 11), - Tokens.Puncuation.Colon(6, 26), - Tokens.Type("Dictionary", 6, 28), - Tokens.Puncuation.TypeParameters.Begin(6, 38), - Tokens.Type("T", 6, 39), - Tokens.Puncuation.Comma(6, 40), - Tokens.Type("Dictionary", 6, 42), - Tokens.Puncuation.TypeParameters.Begin(6, 52), - Tokens.Type("string", 6, 53), - Tokens.Puncuation.Comma(6, 59), - Tokens.Type("string", 6, 61), - Tokens.Puncuation.TypeParameters.End(6, 67), - Tokens.Puncuation.TypeParameters.End(6, 68), - Tokens.Puncuation.Comma(6, 69), - Tokens.Type("IMap", 6, 71), - Tokens.Puncuation.TypeParameters.Begin(6, 75), - Tokens.Type("T", 6, 76), - Tokens.Puncuation.Comma(6, 77), - Tokens.Type("Dictionary", 6, 79), - Tokens.Puncuation.TypeParameters.Begin(6, 89), - Tokens.Type("string", 6, 90), - Tokens.Puncuation.Comma(6, 96), - Tokens.Type("string", 6, 98), - Tokens.Puncuation.TypeParameters.End(6, 104), - Tokens.Puncuation.TypeParameters.End(6, 105), - Tokens.Puncuation.CurlyBrace.Open(6, 107), - Tokens.Puncuation.CurlyBrace.Close(6, 109), - - Tokens.Puncuation.CurlyBrace.Close(7, 1)]); + Tokens.Keywords.Namespace, + Tokens.Identifiers.NamespaceName("TestNamespace"), + Tokens.Puncuation.CurlyBrace.Open, + + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("PublicClass"), + Tokens.Puncuation.Colon, + Tokens.Type("IInterface"), + Tokens.Puncuation.Comma, + Tokens.Type("IInterfaceTwo"), + 
Tokens.Puncuation.CurlyBrace.Open, + Tokens.Puncuation.CurlyBrace.Close, + + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("PublicClass"), + Tokens.Puncuation.Colon, + Tokens.Type("Root"), + Tokens.Puncuation.Accessor, + Tokens.Type("IInterface"), + Tokens.Puncuation.TypeParameters.Begin, + Tokens.Type("Something"), + Tokens.Puncuation.Accessor, + Tokens.Type("Nested"), + Tokens.Puncuation.TypeParameters.End, + Tokens.Puncuation.Comma, + Tokens.Type("Something"), + Tokens.Puncuation.Accessor, + Tokens.Type("IInterfaceTwo"), + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Puncuation.CurlyBrace.Close, + + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("PublicClass"), + Tokens.Puncuation.Colon, + Tokens.Type("Dictionary"), + Tokens.Puncuation.TypeParameters.Begin, + Tokens.Type("T"), + Tokens.Puncuation.Comma, + Tokens.Type("Dictionary"), + Tokens.Puncuation.TypeParameters.Begin, + Tokens.Type("string"), + Tokens.Puncuation.Comma, + Tokens.Type("string"), + Tokens.Puncuation.TypeParameters.End, + Tokens.Puncuation.TypeParameters.End, + Tokens.Puncuation.Comma, + Tokens.Type("IMap"), + Tokens.Puncuation.TypeParameters.Begin, + Tokens.Type("T"), + Tokens.Puncuation.Comma, + Tokens.Type("Dictionary"), + Tokens.Puncuation.TypeParameters.Begin, + Tokens.Type("string"), + Tokens.Puncuation.Comma, + Tokens.Type("string"), + Tokens.Puncuation.TypeParameters.End, + Tokens.Puncuation.TypeParameters.End, + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Puncuation.CurlyBrace.Close, + + Tokens.Puncuation.CurlyBrace.Close]); }); it("generic constraints", () => { @@ -211,51 +211,51 @@ namespace TestNamespace let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Namespace(2, 1), - Tokens.Identifiers.NamespaceName("TestNamespace", 2, 11), - Tokens.Puncuation.CurlyBrace.Open(3, 1), - - Tokens.Keywords.Class(4, 5), - Tokens.Identifiers.ClassName("PublicClass", 4, 11), - Tokens.Keywords.Where(4, 26), - Tokens.Type("T", 4, 32), - Tokens.Puncuation.Colon(4, 
34), - Tokens.Type("ISomething", 4, 36), - Tokens.Puncuation.CurlyBrace.Open(4, 47), - Tokens.Puncuation.CurlyBrace.Close(4, 49), - - Tokens.Keywords.Class(5, 5), - Tokens.Identifiers.ClassName("PublicClass", 5, 11), - Tokens.Puncuation.Colon(5, 29), - Tokens.Type("Dictionary", 5, 31), - Tokens.Puncuation.TypeParameters.Begin(5, 41), - Tokens.Type("T", 5, 42), - Tokens.Puncuation.Comma(5, 43), - Tokens.Type("List", 5, 45), - Tokens.Puncuation.TypeParameters.Begin(5, 49), - Tokens.Type("string", 5, 50), - Tokens.Puncuation.TypeParameters.End(5, 56), - Tokens.Puncuation.SquareBracket.Open(5, 57), - Tokens.Puncuation.SquareBracket.Close(5, 58), - Tokens.Puncuation.TypeParameters.End(5, 59), - Tokens.Puncuation.Comma(5, 60), - Tokens.Type("ISomething", 5, 62), - Tokens.Keywords.Where(6, 9), - Tokens.Type("T", 6, 15), - Tokens.Puncuation.Colon(6, 17), - Tokens.Type("ICar", 6, 19), - Tokens.Puncuation.Comma(6, 23), - Tokens.Keywords.New(6, 25), - Tokens.Puncuation.Parenthesis.Open(6, 28), - Tokens.Puncuation.Parenthesis.Close(6, 29), - Tokens.Keywords.Where(7, 9), - Tokens.Type("X", 7, 15), - Tokens.Puncuation.Colon(7, 17), - Tokens.Keywords.Struct(7, 19), - Tokens.Puncuation.CurlyBrace.Open(8, 5), - Tokens.Puncuation.CurlyBrace.Close(9, 5), - - Tokens.Puncuation.CurlyBrace.Close(10, 1)]); + Tokens.Keywords.Namespace, + Tokens.Identifiers.NamespaceName("TestNamespace"), + Tokens.Puncuation.CurlyBrace.Open, + + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("PublicClass"), + Tokens.Keywords.Where, + Tokens.Type("T"), + Tokens.Puncuation.Colon, + Tokens.Type("ISomething"), + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Puncuation.CurlyBrace.Close, + + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("PublicClass"), + Tokens.Puncuation.Colon, + Tokens.Type("Dictionary"), + Tokens.Puncuation.TypeParameters.Begin, + Tokens.Type("T"), + Tokens.Puncuation.Comma, + Tokens.Type("List"), + Tokens.Puncuation.TypeParameters.Begin, + Tokens.Type("string"), + 
Tokens.Puncuation.TypeParameters.End, + Tokens.Puncuation.SquareBracket.Open, + Tokens.Puncuation.SquareBracket.Close, + Tokens.Puncuation.TypeParameters.End, + Tokens.Puncuation.Comma, + Tokens.Type("ISomething"), + Tokens.Keywords.Where, + Tokens.Type("T"), + Tokens.Puncuation.Colon, + Tokens.Type("ICar"), + Tokens.Puncuation.Comma, + Tokens.Keywords.New, + Tokens.Puncuation.Parenthesis.Open, + Tokens.Puncuation.Parenthesis.Close, + Tokens.Keywords.Where, + Tokens.Type("X"), + Tokens.Puncuation.Colon, + Tokens.Keywords.Struct, + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Puncuation.CurlyBrace.Close, + + Tokens.Puncuation.CurlyBrace.Close]); }); it("nested class", () => { @@ -274,23 +274,23 @@ namespace TestNamespace let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Namespace(2, 1), - Tokens.Identifiers.NamespaceName("TestNamespace", 2, 11), - Tokens.Puncuation.CurlyBrace.Open(3, 1), + Tokens.Keywords.Namespace, + Tokens.Identifiers.NamespaceName("TestNamespace"), + Tokens.Puncuation.CurlyBrace.Open, - Tokens.Keywords.Class(4, 5), - Tokens.Identifiers.ClassName("Klass", 4, 11), - Tokens.Puncuation.CurlyBrace.Open(5, 5), + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("Klass"), + Tokens.Puncuation.CurlyBrace.Open, - Tokens.Keywords.Modifiers.Public(6, 9), - Tokens.Keywords.Class(6, 16), - Tokens.Identifiers.ClassName("Nested", 6, 22), - Tokens.Puncuation.CurlyBrace.Open(7, 9), - Tokens.Puncuation.CurlyBrace.Close(9, 9), + Tokens.Keywords.Modifiers.Public, + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("Nested"), + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Puncuation.CurlyBrace.Close, - Tokens.Puncuation.CurlyBrace.Close(10, 5), + Tokens.Puncuation.CurlyBrace.Close, - Tokens.Puncuation.CurlyBrace.Close(11, 1)]); + Tokens.Puncuation.CurlyBrace.Close]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/comments.test.syntax.ts b/test/syntaxes/comments.test.syntax.ts index 047724b6d0..12274ba1f6 100644 --- 
a/test/syntaxes/comments.test.syntax.ts +++ b/test/syntaxes/comments.test.syntax.ts @@ -18,8 +18,8 @@ describe("Grammar", () => { let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Comment.SingleLine.Start(2, 1), - Tokens.Comment.SingleLine.Text(" foo", 2, 3)]); + Tokens.Comment.SingleLine.Start, + Tokens.Comment.SingleLine.Text(" foo")]); }); it("single-line comment after whitespace", () => { @@ -30,9 +30,9 @@ describe("Grammar", () => { let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Comment.LeadingWhitespace(" ", 2, 1), - Tokens.Comment.SingleLine.Start(2, 5), - Tokens.Comment.SingleLine.Text(" foo", 2, 7)]); + Tokens.Comment.LeadingWhitespace(" "), + Tokens.Comment.SingleLine.Start, + Tokens.Comment.SingleLine.Text(" foo")]); }); it("multi-line comment", () => { @@ -43,9 +43,9 @@ describe("Grammar", () => { let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Comment.MultiLine.Start(2, 1), - Tokens.Comment.MultiLine.Text(" foo ", 2, 3), - Tokens.Comment.MultiLine.End(2, 8)]); + Tokens.Comment.MultiLine.Start, + Tokens.Comment.MultiLine.Text(" foo "), + Tokens.Comment.MultiLine.End]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/delegates.test.syntax.ts b/test/syntaxes/delegates.test.syntax.ts index c7472d22ac..fba8080a03 100644 --- a/test/syntaxes/delegates.test.syntax.ts +++ b/test/syntaxes/delegates.test.syntax.ts @@ -19,12 +19,12 @@ delegate void D(); let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Delegate(2, 1), - Tokens.Type("void", 2, 10), - Tokens.Identifiers.DelegateName("D", 2, 15), - Tokens.Puncuation.Parenthesis.Open(2, 16), - Tokens.Puncuation.Parenthesis.Close(2, 17), - Tokens.Puncuation.Semicolon(2, 18)]); + Tokens.Keywords.Delegate, + Tokens.Type("void"), + Tokens.Identifiers.DelegateName("D"), + Tokens.Puncuation.Parenthesis.Open, + Tokens.Puncuation.Parenthesis.Close, + Tokens.Puncuation.Semicolon]); }); it("generic delegate with variance", () => { 
@@ -36,14 +36,14 @@ delegate TResult D(T arg1); let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Delegate(2, 1), - Tokens.Type("TResult", 2, 10), - Tokens.Identifiers.DelegateName("D", 2, 18), - Tokens.Puncuation.Parenthesis.Open(2, 38), - Tokens.Type("T", 2, 39), - Tokens.Variables.Parameter("arg1", 2, 41), - Tokens.Puncuation.Parenthesis.Close(2, 45), - Tokens.Puncuation.Semicolon(2, 46)]); + Tokens.Keywords.Delegate, + Tokens.Type("TResult"), + Tokens.Identifiers.DelegateName("D"), + Tokens.Puncuation.Parenthesis.Open, + Tokens.Type("T"), + Tokens.Variables.Parameter("arg1"), + Tokens.Puncuation.Parenthesis.Close, + Tokens.Puncuation.Semicolon]); }); it("generic delegate with constraints", () => { @@ -56,16 +56,16 @@ delegate void D() let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Delegate(2, 1), - Tokens.Type("void", 2, 10), - Tokens.Identifiers.DelegateName("D", 2, 15), - Tokens.Puncuation.Parenthesis.Open(2, 24), - Tokens.Puncuation.Parenthesis.Close(2, 25), - Tokens.Keywords.Where(3, 5), - Tokens.Type("T1", 3, 11), - Tokens.Puncuation.Colon(3, 14), - Tokens.Type("T2", 3, 16), - Tokens.Puncuation.Semicolon(3, 18)]); + Tokens.Keywords.Delegate, + Tokens.Type("void"), + Tokens.Identifiers.DelegateName("D"), + Tokens.Puncuation.Parenthesis.Open, + Tokens.Puncuation.Parenthesis.Close, + Tokens.Keywords.Where, + Tokens.Type("T1"), + Tokens.Puncuation.Colon, + Tokens.Type("T2"), + Tokens.Puncuation.Semicolon]); }); it("delegate with multiple parameters", () => { @@ -77,25 +77,25 @@ delegate int D(ref string x, out int y, params object[] z); let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Delegate(2, 1), - Tokens.Type("int", 2, 10), - Tokens.Identifiers.DelegateName("D", 2, 14), - Tokens.Puncuation.Parenthesis.Open(2, 15), - Tokens.Keywords.Modifiers.Ref(2, 16), - Tokens.Type("string", 2, 20), - Tokens.Variables.Parameter("x", 2, 27), - Tokens.Puncuation.Comma(2, 28), - 
Tokens.Keywords.Modifiers.Out(2, 30), - Tokens.Type("int", 2, 34), - Tokens.Variables.Parameter("y", 2, 38), - Tokens.Puncuation.Comma(2, 39), - Tokens.Keywords.Modifiers.Params(2, 41), - Tokens.Type("object", 2, 48), - Tokens.Puncuation.SquareBracket.Open(2, 54), - Tokens.Puncuation.SquareBracket.Close(2, 55), - Tokens.Variables.Parameter("z", 2, 57), - Tokens.Puncuation.Parenthesis.Close(2, 58), - Tokens.Puncuation.Semicolon(2, 59)]); + Tokens.Keywords.Delegate, + Tokens.Type("int"), + Tokens.Identifiers.DelegateName("D"), + Tokens.Puncuation.Parenthesis.Open, + Tokens.Keywords.Modifiers.Ref, + Tokens.Type("string"), + Tokens.Variables.Parameter("x"), + Tokens.Puncuation.Comma, + Tokens.Keywords.Modifiers.Out, + Tokens.Type("int"), + Tokens.Variables.Parameter("y"), + Tokens.Puncuation.Comma, + Tokens.Keywords.Modifiers.Params, + Tokens.Type("object"), + Tokens.Puncuation.SquareBracket.Open, + Tokens.Puncuation.SquareBracket.Close, + Tokens.Variables.Parameter("z"), + Tokens.Puncuation.Parenthesis.Close, + Tokens.Puncuation.Semicolon]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/enums.test.syntax.ts b/test/syntaxes/enums.test.syntax.ts index 2b168b4f98..49b86969ad 100644 --- a/test/syntaxes/enums.test.syntax.ts +++ b/test/syntaxes/enums.test.syntax.ts @@ -19,10 +19,10 @@ enum E { } let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Enum(2, 1), - Tokens.Identifiers.EnumName("E", 2, 6), - Tokens.Puncuation.CurlyBrace.Open(2, 8), - Tokens.Puncuation.CurlyBrace.Close(2, 10)]); + Tokens.Keywords.Enum, + Tokens.Identifiers.EnumName("E"), + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Puncuation.CurlyBrace.Close]); }); it("enum with base type", () => { @@ -34,12 +34,12 @@ enum E : byte { } let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Enum(2, 1), - Tokens.Identifiers.EnumName("E", 2, 6), - Tokens.Puncuation.Colon(2, 8), - Tokens.Type("byte", 2, 10), - Tokens.Puncuation.CurlyBrace.Open(2, 15), - 
Tokens.Puncuation.CurlyBrace.Close(2, 17)]); + Tokens.Keywords.Enum, + Tokens.Identifiers.EnumName("E"), + Tokens.Puncuation.Colon, + Tokens.Type("byte"), + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Puncuation.CurlyBrace.Close]); }); it("enum with single member", () => { @@ -51,11 +51,11 @@ enum E { M1 } let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Enum(2, 1), - Tokens.Identifiers.EnumName("E", 2, 6), - Tokens.Puncuation.CurlyBrace.Open(2, 8), - Tokens.Variables.EnumMember("M1", 2, 10), - Tokens.Puncuation.CurlyBrace.Close(2, 13)]); + Tokens.Keywords.Enum, + Tokens.Identifiers.EnumName("E"), + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Variables.EnumMember("M1"), + Tokens.Puncuation.CurlyBrace.Close]); }); it("enum with multiple members", () => { @@ -67,15 +67,15 @@ enum Color { Red, Green, Blue } let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Enum(2, 1), - Tokens.Identifiers.EnumName("Color", 2, 6), - Tokens.Puncuation.CurlyBrace.Open(2, 12), - Tokens.Variables.EnumMember("Red", 2, 14), - Tokens.Puncuation.Comma(2, 17), - Tokens.Variables.EnumMember("Green", 2, 19), - Tokens.Puncuation.Comma(2, 24), - Tokens.Variables.EnumMember("Blue", 2, 26), - Tokens.Puncuation.CurlyBrace.Close(2, 31)]); + Tokens.Keywords.Enum, + Tokens.Identifiers.EnumName("Color"), + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Variables.EnumMember("Red"), + Tokens.Puncuation.Comma, + Tokens.Variables.EnumMember("Green"), + Tokens.Puncuation.Comma, + Tokens.Variables.EnumMember("Blue"), + Tokens.Puncuation.CurlyBrace.Close]); }); it("enum with initialized member", () => { @@ -92,17 +92,17 @@ enum E let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Enum(2, 1), - Tokens.Identifiers.EnumName("E", 2, 6), - Tokens.Puncuation.CurlyBrace.Open(3, 1), - Tokens.Variables.EnumMember("Value1", 4, 5), - Tokens.Operators.Assignment(4, 12), - Tokens.Literals.Numeric.Decimal("1", 4, 14), - Tokens.Puncuation.Comma(4, 15), - 
Tokens.Variables.EnumMember("Value2", 5, 5), - Tokens.Puncuation.Comma(5, 11), - Tokens.Variables.EnumMember("Value3", 6, 5), - Tokens.Puncuation.CurlyBrace.Close(7, 1)]); + Tokens.Keywords.Enum, + Tokens.Identifiers.EnumName("E"), + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Variables.EnumMember("Value1"), + Tokens.Operators.Assignment, + Tokens.Literals.Numeric.Decimal("1"), + Tokens.Puncuation.Comma, + Tokens.Variables.EnumMember("Value2"), + Tokens.Puncuation.Comma, + Tokens.Variables.EnumMember("Value3"), + Tokens.Puncuation.CurlyBrace.Close]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/events.test.syntax.ts b/test/syntaxes/events.test.syntax.ts index d09e030489..892a4b21e0 100644 --- a/test/syntaxes/events.test.syntax.ts +++ b/test/syntaxes/events.test.syntax.ts @@ -21,18 +21,18 @@ public class Tester let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public(2, 1), - Tokens.Keywords.Class(2, 8), - Tokens.Identifiers.ClassName("Tester", 2, 14), - Tokens.Puncuation.CurlyBrace.Open(3, 1), - - Tokens.Keywords.Modifiers.Public(4, 5), - Tokens.Keywords.Event(4, 12), - Tokens.Type("Type", 4, 18), - Tokens.Identifiers.EventName("Event", 4, 23), - Tokens.Puncuation.Semicolon(4, 28), - - Tokens.Puncuation.CurlyBrace.Close(5, 1)]); + Tokens.Keywords.Modifiers.Public, + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("Tester"), + Tokens.Puncuation.CurlyBrace.Open, + + Tokens.Keywords.Modifiers.Public, + Tokens.Keywords.Event, + Tokens.Type("Type"), + Tokens.Identifiers.EventName("Event"), + Tokens.Puncuation.Semicolon, + + Tokens.Puncuation.CurlyBrace.Close]); }); it("declaration with multiple modifiers", () => { @@ -46,19 +46,19 @@ public class Tester let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public(2, 1), - Tokens.Keywords.Class(2, 8), - Tokens.Identifiers.ClassName("Tester", 2, 14), - Tokens.Puncuation.CurlyBrace.Open(3, 1), - - 
Tokens.Keywords.Modifiers.Protected(4, 5), - Tokens.Keywords.Modifiers.Internal(4, 15), - Tokens.Keywords.Event(4, 24), - Tokens.Type("Type", 4, 30), - Tokens.Identifiers.EventName("Event", 4, 35), - Tokens.Puncuation.Semicolon(4, 40), - - Tokens.Puncuation.CurlyBrace.Close(5, 1)]); + Tokens.Keywords.Modifiers.Public, + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("Tester"), + Tokens.Puncuation.CurlyBrace.Open, + + Tokens.Keywords.Modifiers.Protected, + Tokens.Keywords.Modifiers.Internal, + Tokens.Keywords.Event, + Tokens.Type("Type"), + Tokens.Identifiers.EventName("Event"), + Tokens.Puncuation.Semicolon, + + Tokens.Puncuation.CurlyBrace.Close]); }); it("declaration with multiple declarators", () => { @@ -72,20 +72,20 @@ public class Tester let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public(2, 1), - Tokens.Keywords.Class(2, 8), - Tokens.Identifiers.ClassName("Tester", 2, 14), - Tokens.Puncuation.CurlyBrace.Open(3, 1), - - Tokens.Keywords.Modifiers.Public(4, 5), - Tokens.Keywords.Event(4, 12), - Tokens.Type("Type", 4, 18), - Tokens.Identifiers.EventName("Event1", 4, 23), - Tokens.Puncuation.Comma(4, 29), - Tokens.Identifiers.EventName("Event2", 4, 31), - Tokens.Puncuation.Semicolon(4, 37), - - Tokens.Puncuation.CurlyBrace.Close(5, 1)]); + Tokens.Keywords.Modifiers.Public, + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("Tester"), + Tokens.Puncuation.CurlyBrace.Open, + + Tokens.Keywords.Modifiers.Public, + Tokens.Keywords.Event, + Tokens.Type("Type"), + Tokens.Identifiers.EventName("Event1"), + Tokens.Puncuation.Comma, + Tokens.Identifiers.EventName("Event2"), + Tokens.Puncuation.Semicolon, + + Tokens.Puncuation.CurlyBrace.Close]); }); it("generic", () => { @@ -99,31 +99,31 @@ public class Tester let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public(2, 1), - Tokens.Keywords.Class(2, 8), - Tokens.Identifiers.ClassName("Tester", 2, 14), - 
Tokens.Puncuation.CurlyBrace.Open(3, 1), - - Tokens.Keywords.Modifiers.Public(4, 5), - Tokens.Keywords.Event(4, 12), - Tokens.Type("EventHandler", 4, 18), - Tokens.Puncuation.TypeParameters.Begin(4, 30), - Tokens.Type("List", 4, 31), - Tokens.Puncuation.TypeParameters.Begin(4, 35), - Tokens.Type("T", 4, 36), - Tokens.Puncuation.TypeParameters.End(4, 37), - Tokens.Puncuation.Comma(4, 38), - Tokens.Type("Dictionary", 4, 40), - Tokens.Puncuation.TypeParameters.Begin(4, 50), - Tokens.Type("T", 4, 51), - Tokens.Puncuation.Comma(4, 52), - Tokens.Type("D", 4, 54), - Tokens.Puncuation.TypeParameters.End(4, 55), - Tokens.Puncuation.TypeParameters.End(4, 56), - Tokens.Identifiers.EventName("Event", 4, 58), - Tokens.Puncuation.Semicolon(4, 63), - - Tokens.Puncuation.CurlyBrace.Close(5, 1)]); + Tokens.Keywords.Modifiers.Public, + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("Tester"), + Tokens.Puncuation.CurlyBrace.Open, + + Tokens.Keywords.Modifiers.Public, + Tokens.Keywords.Event, + Tokens.Type("EventHandler"), + Tokens.Puncuation.TypeParameters.Begin, + Tokens.Type("List"), + Tokens.Puncuation.TypeParameters.Begin, + Tokens.Type("T"), + Tokens.Puncuation.TypeParameters.End, + Tokens.Puncuation.Comma, + Tokens.Type("Dictionary"), + Tokens.Puncuation.TypeParameters.Begin, + Tokens.Type("T"), + Tokens.Puncuation.Comma, + Tokens.Type("D"), + Tokens.Puncuation.TypeParameters.End, + Tokens.Puncuation.TypeParameters.End, + Tokens.Identifiers.EventName("Event"), + Tokens.Puncuation.Semicolon, + + Tokens.Puncuation.CurlyBrace.Close]); }); it("declaration with accessors", () => { @@ -141,25 +141,25 @@ public class Tester let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public(2, 1), - Tokens.Keywords.Class(2, 8), - Tokens.Identifiers.ClassName("Tester", 2, 14), - Tokens.Puncuation.CurlyBrace.Open(3, 1), - - Tokens.Keywords.Modifiers.Public(4, 5), - Tokens.Keywords.Event(4, 12), - Tokens.Type("Type", 4, 18), - 
Tokens.Identifiers.EventName("Event", 4, 23), - Tokens.Puncuation.CurlyBrace.Open(5, 5), - Tokens.Keywords.Add(6, 9), - Tokens.Puncuation.CurlyBrace.Open(6, 13), - Tokens.Puncuation.CurlyBrace.Close(6, 15), - Tokens.Keywords.Remove(7, 9), - Tokens.Puncuation.CurlyBrace.Open(7, 16), - Tokens.Puncuation.CurlyBrace.Close(7, 18), - Tokens.Puncuation.CurlyBrace.Close(8, 5), - - Tokens.Puncuation.CurlyBrace.Close(9, 1)]); + Tokens.Keywords.Modifiers.Public, + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("Tester"), + Tokens.Puncuation.CurlyBrace.Open, + + Tokens.Keywords.Modifiers.Public, + Tokens.Keywords.Event, + Tokens.Type("Type"), + Tokens.Identifiers.EventName("Event"), + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Keywords.Add, + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Puncuation.CurlyBrace.Close, + Tokens.Keywords.Remove, + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Puncuation.CurlyBrace.Close, + Tokens.Puncuation.CurlyBrace.Close, + + Tokens.Puncuation.CurlyBrace.Close]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/extern-aliases.test.syntax.ts b/test/syntaxes/extern-aliases.test.syntax.ts index 064d3eb177..4febcdd9c7 100644 --- a/test/syntaxes/extern-aliases.test.syntax.ts +++ b/test/syntaxes/extern-aliases.test.syntax.ts @@ -19,14 +19,14 @@ extern alias Y;`; let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Extern(2, 1), - Tokens.Keywords.Alias(2, 8), - Tokens.Variables.Alias("X", 2, 14), - Tokens.Puncuation.Semicolon(2, 15), - Tokens.Keywords.Extern(3, 1), - Tokens.Keywords.Alias(3, 8), - Tokens.Variables.Alias("Y", 3, 14), - Tokens.Puncuation.Semicolon(3, 15)]); + Tokens.Keywords.Extern, + Tokens.Keywords.Alias, + Tokens.Variables.Alias("X"), + Tokens.Puncuation.Semicolon, + Tokens.Keywords.Extern, + Tokens.Keywords.Alias, + Tokens.Variables.Alias("Y"), + Tokens.Puncuation.Semicolon]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/fields.test.syntax.ts 
b/test/syntaxes/fields.test.syntax.ts index 2698c4fab6..9b38c85ac4 100644 --- a/test/syntaxes/fields.test.syntax.ts +++ b/test/syntaxes/fields.test.syntax.ts @@ -23,27 +23,27 @@ public class Tester let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public(2, 1), - Tokens.Keywords.Class(2, 8), - Tokens.Identifiers.ClassName("Tester", 2, 14), - Tokens.Puncuation.CurlyBrace.Open(3, 1), - - Tokens.Keywords.Modifiers.Private(4, 5), - Tokens.Type("List", 4, 13), - Tokens.Identifiers.FieldName("_field", 4, 18), - Tokens.Puncuation.Semicolon(4, 24), - - Tokens.Keywords.Modifiers.Private(5, 5), - Tokens.Type("List", 5, 13), - Tokens.Identifiers.FieldName("field", 5, 18), - Tokens.Puncuation.Semicolon(5, 23), - - Tokens.Keywords.Modifiers.Private(6, 5), - Tokens.Type("List", 6, 13), - Tokens.Identifiers.FieldName("field123", 6, 18), - Tokens.Puncuation.Semicolon(6, 26), - - Tokens.Puncuation.CurlyBrace.Close(7, 1)]); + Tokens.Keywords.Modifiers.Public, + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("Tester"), + Tokens.Puncuation.CurlyBrace.Open, + + Tokens.Keywords.Modifiers.Private, + Tokens.Type("List"), + Tokens.Identifiers.FieldName("_field"), + Tokens.Puncuation.Semicolon, + + Tokens.Keywords.Modifiers.Private, + Tokens.Type("List"), + Tokens.Identifiers.FieldName("field"), + Tokens.Puncuation.Semicolon, + + Tokens.Keywords.Modifiers.Private, + Tokens.Type("List"), + Tokens.Identifiers.FieldName("field123"), + Tokens.Puncuation.Semicolon, + + Tokens.Puncuation.CurlyBrace.Close]); }); it("generic", () => { @@ -57,30 +57,30 @@ public class Tester let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public(2, 1), - Tokens.Keywords.Class(2, 8), - Tokens.Identifiers.ClassName("Tester", 2, 14), - Tokens.Puncuation.CurlyBrace.Open(3, 1), - - Tokens.Keywords.Modifiers.Private(4, 5), - Tokens.Type("Dictionary", 4, 13), - Tokens.Puncuation.TypeParameters.Begin(4, 23), - Tokens.Type("List", 4, 25), - 
Tokens.Puncuation.TypeParameters.Begin(4, 29), - Tokens.Type("T", 4, 30), - Tokens.Puncuation.TypeParameters.End(4, 31), - Tokens.Puncuation.Comma(4, 32), - Tokens.Type("Dictionary", 4, 34), - Tokens.Puncuation.TypeParameters.Begin(4, 44), - Tokens.Type("T", 4, 45), - Tokens.Puncuation.Comma(4, 46), - Tokens.Type("D", 4, 48), - Tokens.Puncuation.TypeParameters.End(4, 49), - Tokens.Puncuation.TypeParameters.End(4, 50), - Tokens.Identifiers.FieldName("_field", 4, 52), - Tokens.Puncuation.Semicolon(4, 58), - - Tokens.Puncuation.CurlyBrace.Close(5, 1)]); + Tokens.Keywords.Modifiers.Public, + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("Tester"), + Tokens.Puncuation.CurlyBrace.Open, + + Tokens.Keywords.Modifiers.Private, + Tokens.Type("Dictionary"), + Tokens.Puncuation.TypeParameters.Begin, + Tokens.Type("List"), + Tokens.Puncuation.TypeParameters.Begin, + Tokens.Type("T"), + Tokens.Puncuation.TypeParameters.End, + Tokens.Puncuation.Comma, + Tokens.Type("Dictionary"), + Tokens.Puncuation.TypeParameters.Begin, + Tokens.Type("T"), + Tokens.Puncuation.Comma, + Tokens.Type("D"), + Tokens.Puncuation.TypeParameters.End, + Tokens.Puncuation.TypeParameters.End, + Tokens.Identifiers.FieldName("_field"), + Tokens.Puncuation.Semicolon, + + Tokens.Puncuation.CurlyBrace.Close]); }); @@ -97,28 +97,28 @@ public class Tester let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public(2, 1), - Tokens.Keywords.Class(2, 8), - Tokens.Identifiers.ClassName("Tester", 2, 14), - Tokens.Puncuation.CurlyBrace.Open(3, 1), - - Tokens.Keywords.Modifiers.Private(4, 5), - Tokens.Keywords.Modifiers.Static(4, 13), - Tokens.Keywords.Modifiers.ReadOnly(4, 20), - Tokens.Type("List", 4, 29), - Tokens.Identifiers.FieldName("_field", 4, 34), - Tokens.Puncuation.Semicolon(4, 40), - - Tokens.Keywords.Modifiers.ReadOnly(5, 5), - Tokens.Type("string", 5, 14), - Tokens.Identifiers.FieldName("_field2", 5, 21), - Tokens.Puncuation.Semicolon(5, 28), - - Tokens.Type("string", 
6, 5), - Tokens.Identifiers.FieldName("_field3", 6, 12), - Tokens.Puncuation.Semicolon(6, 19), - - Tokens.Puncuation.CurlyBrace.Close(7, 1)]); + Tokens.Keywords.Modifiers.Public, + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("Tester"), + Tokens.Puncuation.CurlyBrace.Open, + + Tokens.Keywords.Modifiers.Private, + Tokens.Keywords.Modifiers.Static, + Tokens.Keywords.Modifiers.ReadOnly, + Tokens.Type("List"), + Tokens.Identifiers.FieldName("_field"), + Tokens.Puncuation.Semicolon, + + Tokens.Keywords.Modifiers.ReadOnly, + Tokens.Type("string"), + Tokens.Identifiers.FieldName("_field2"), + Tokens.Puncuation.Semicolon, + + Tokens.Type("string"), + Tokens.Identifiers.FieldName("_field3"), + Tokens.Puncuation.Semicolon, + + Tokens.Puncuation.CurlyBrace.Close]); }); it("types", () => { @@ -133,22 +133,22 @@ public class Tester let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public(2, 1), - Tokens.Keywords.Class(2, 8), - Tokens.Identifiers.ClassName("Tester", 2, 14), - Tokens.Puncuation.CurlyBrace.Open(3, 1), - - Tokens.Type("string", 4, 5), - Tokens.Identifiers.FieldName("field123", 4, 12), - Tokens.Puncuation.Semicolon(4, 20), - - Tokens.Type("string", 5, 5), - Tokens.Puncuation.SquareBracket.Open(5, 11), - Tokens.Puncuation.SquareBracket.Close(5, 12), - Tokens.Identifiers.FieldName("field123", 5, 14), - Tokens.Puncuation.Semicolon(5, 22), - - Tokens.Puncuation.CurlyBrace.Close(6, 1)]); + Tokens.Keywords.Modifiers.Public, + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("Tester"), + Tokens.Puncuation.CurlyBrace.Open, + + Tokens.Type("string"), + Tokens.Identifiers.FieldName("field123"), + Tokens.Puncuation.Semicolon, + + Tokens.Type("string"), + Tokens.Puncuation.SquareBracket.Open, + Tokens.Puncuation.SquareBracket.Close, + Tokens.Identifiers.FieldName("field123"), + Tokens.Puncuation.Semicolon, + + Tokens.Puncuation.CurlyBrace.Close]); }); it("assignment", () => { @@ -163,28 +163,28 @@ public class Tester let tokens = 
tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public(2, 1), - Tokens.Keywords.Class(2, 8), - Tokens.Identifiers.ClassName("Tester", 2, 14), - Tokens.Puncuation.CurlyBrace.Open(3, 1), - - Tokens.Keywords.Modifiers.Private(4, 5), - Tokens.Type("string", 4, 13), - Tokens.Identifiers.FieldName("field", 4, 20), - Tokens.Operators.Assignment(4, 26), - Tokens.Puncuation.String.Begin(4, 28), - Tokens.Literals.String("hello", 4, 29), - Tokens.Puncuation.String.End(4, 34), - Tokens.Puncuation.Semicolon(4, 35), - - Tokens.Keywords.Modifiers.Const(5, 5), - Tokens.Type("bool", 5, 13), - Tokens.Identifiers.FieldName("field", 5, 20), - Tokens.Operators.Assignment(5, 26), - Tokens.Literals.Boolean.True(5, 28), - Tokens.Puncuation.Semicolon(5, 32), - - Tokens.Puncuation.CurlyBrace.Close(6, 1)]); + Tokens.Keywords.Modifiers.Public, + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("Tester"), + Tokens.Puncuation.CurlyBrace.Open, + + Tokens.Keywords.Modifiers.Private, + Tokens.Type("string"), + Tokens.Identifiers.FieldName("field"), + Tokens.Operators.Assignment, + Tokens.Puncuation.String.Begin, + Tokens.Literals.String("hello"), + Tokens.Puncuation.String.End, + Tokens.Puncuation.Semicolon, + + Tokens.Keywords.Modifiers.Const, + Tokens.Type("bool"), + Tokens.Identifiers.FieldName("field"), + Tokens.Operators.Assignment, + Tokens.Literals.Boolean.True, + Tokens.Puncuation.Semicolon, + + Tokens.Puncuation.CurlyBrace.Close]); }); it("declaration with multiple declarators", () => { @@ -198,26 +198,26 @@ public class Tester let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public(2, 1), - Tokens.Keywords.Class(2, 8), - Tokens.Identifiers.ClassName("Tester", 2, 14), - Tokens.Puncuation.CurlyBrace.Open(3, 1), - - Tokens.Type("int", 4, 5), - Tokens.Identifiers.FieldName("x", 4, 9), - Tokens.Operators.Assignment(4, 11), - Tokens.Literals.Numeric.Decimal("19", 4, 13), - Tokens.Puncuation.Comma(4, 15), - 
Tokens.Identifiers.FieldName("y", 4, 17), - Tokens.Operators.Assignment(4, 19), - Tokens.Literals.Numeric.Decimal("23", 4, 21), - Tokens.Puncuation.Comma(4, 23), - Tokens.Identifiers.FieldName("z", 4, 25), - Tokens.Operators.Assignment(4, 27), - Tokens.Literals.Numeric.Decimal("42", 4, 29), - Tokens.Puncuation.Semicolon(4, 31), - - Tokens.Puncuation.CurlyBrace.Close(5, 1)]); + Tokens.Keywords.Modifiers.Public, + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("Tester"), + Tokens.Puncuation.CurlyBrace.Open, + + Tokens.Type("int"), + Tokens.Identifiers.FieldName("x"), + Tokens.Operators.Assignment, + Tokens.Literals.Numeric.Decimal("19"), + Tokens.Puncuation.Comma, + Tokens.Identifiers.FieldName("y"), + Tokens.Operators.Assignment, + Tokens.Literals.Numeric.Decimal("23"), + Tokens.Puncuation.Comma, + Tokens.Identifiers.FieldName("z"), + Tokens.Operators.Assignment, + Tokens.Literals.Numeric.Decimal("42"), + Tokens.Puncuation.Semicolon, + + Tokens.Puncuation.CurlyBrace.Close]); }); }); }); diff --git a/test/syntaxes/indexers.test.syntax.ts b/test/syntaxes/indexers.test.syntax.ts index 59ac9ba465..6806157aef 100644 --- a/test/syntaxes/indexers.test.syntax.ts +++ b/test/syntaxes/indexers.test.syntax.ts @@ -13,30 +13,30 @@ describe("Grammar", () => { it("declaration", () => { const input = Input.InClass(` - public string this[int index] - { - get { return index.ToString(); } - }`); +public string this[int index] +{ + get { return index.ToString(); } +}`); const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public(4, 5), - Tokens.Type("string", 4, 12), - Tokens.Keywords.This(4, 19), - Tokens.Puncuation.SquareBracket.Open(4, 23), - Tokens.Type("int", 4, 24), - Tokens.Variables.Parameter("index", 4, 28), - Tokens.Puncuation.SquareBracket.Close(4, 33), - Tokens.Puncuation.CurlyBrace.Open(5, 5), - Tokens.Keywords.Get(6, 9), - Tokens.Puncuation.CurlyBrace.Open(6, 13), - Tokens.Keywords.Return(6, 15), - 
Tokens.Variables.ReadWrite("index", 6, 22), - Tokens.Variables.ReadWrite("ToString", 6, 28), - Tokens.Puncuation.Semicolon(6, 38), - Tokens.Puncuation.CurlyBrace.Close(6, 40), - Tokens.Puncuation.CurlyBrace.Close(7, 5)]); + Tokens.Keywords.Modifiers.Public, + Tokens.Type("string"), + Tokens.Keywords.This, + Tokens.Puncuation.SquareBracket.Open, + Tokens.Type("int"), + Tokens.Variables.Parameter("index"), + Tokens.Puncuation.SquareBracket.Close, + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Keywords.Get, + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Keywords.Return, + Tokens.Variables.ReadWrite("index"), + Tokens.Variables.ReadWrite("ToString"), + Tokens.Puncuation.Semicolon, + Tokens.Puncuation.CurlyBrace.Close, + Tokens.Puncuation.CurlyBrace.Close]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/interation-statements.test.syntax.ts b/test/syntaxes/interation-statements.test.syntax.ts index 090efd4b9d..4a98903094 100644 --- a/test/syntaxes/interation-statements.test.syntax.ts +++ b/test/syntaxes/interation-statements.test.syntax.ts @@ -19,18 +19,18 @@ while (true) { } let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Class(2, 1), - Tokens.Identifiers.ClassName("Tester", 2, 7), - Tokens.Puncuation.CurlyBrace.Open(3, 1), - - Tokens.Type("void", 4, 5), - Tokens.Identifiers.MethodName("Foo", 4, 10), - Tokens.Puncuation.Parenthesis.Open(4, 13), - Tokens.Puncuation.Parenthesis.Close(4, 14), - Tokens.Puncuation.CurlyBrace.Open(5, 5), - Tokens.Puncuation.CurlyBrace.Close(7, 5), - - Tokens.Puncuation.CurlyBrace.Close(8, 1)]); + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("Tester"), + Tokens.Puncuation.CurlyBrace.Open, + + Tokens.Type("void"), + Tokens.Identifiers.MethodName("Foo"), + Tokens.Puncuation.Parenthesis.Open, + Tokens.Puncuation.Parenthesis.Close, + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Puncuation.CurlyBrace.Close, + + Tokens.Puncuation.CurlyBrace.Close]); }); }); }); \ No newline at end of file diff 
--git a/test/syntaxes/interfaces.test.syntax.ts b/test/syntaxes/interfaces.test.syntax.ts index 776a9b1c2e..0de88364a9 100644 --- a/test/syntaxes/interfaces.test.syntax.ts +++ b/test/syntaxes/interfaces.test.syntax.ts @@ -19,10 +19,10 @@ interface IFoo { } let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Interface(2, 1), - Tokens.Identifiers.InterfaceName("IFoo", 2, 11), - Tokens.Puncuation.CurlyBrace.Open(2, 16), - Tokens.Puncuation.CurlyBrace.Close(2, 18)]); + Tokens.Keywords.Interface, + Tokens.Identifiers.InterfaceName("IFoo"), + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Puncuation.CurlyBrace.Close]); }); it("interface inheritance", () => { @@ -35,16 +35,16 @@ interface IBar : IFoo { } let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Interface(2, 1), - Tokens.Identifiers.InterfaceName("IFoo", 2, 11), - Tokens.Puncuation.CurlyBrace.Open(2, 16), - Tokens.Puncuation.CurlyBrace.Close(2, 18), - Tokens.Keywords.Interface(3, 1), - Tokens.Identifiers.InterfaceName("IBar", 3, 11), - Tokens.Puncuation.Colon(3, 16), - Tokens.Type("IFoo", 3, 18), - Tokens.Puncuation.CurlyBrace.Open(3, 23), - Tokens.Puncuation.CurlyBrace.Close(3, 25)]); + Tokens.Keywords.Interface, + Tokens.Identifiers.InterfaceName("IFoo"), + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Puncuation.CurlyBrace.Close, + Tokens.Keywords.Interface, + Tokens.Identifiers.InterfaceName("IBar"), + Tokens.Puncuation.Colon, + Tokens.Type("IFoo"), + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Puncuation.CurlyBrace.Close]); }); it("generic interface", () => { @@ -56,10 +56,10 @@ interface IFoo { } let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Interface(2, 1), - Tokens.Identifiers.InterfaceName("IFoo", 2, 11), - Tokens.Puncuation.CurlyBrace.Open(2, 24), - Tokens.Puncuation.CurlyBrace.Close(2, 26)]); + Tokens.Keywords.Interface, + Tokens.Identifiers.InterfaceName("IFoo"), + Tokens.Puncuation.CurlyBrace.Open, + 
Tokens.Puncuation.CurlyBrace.Close]); }); it("generic interface with variance", () => { @@ -71,10 +71,10 @@ interface IFoo { } let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Interface(2, 1), - Tokens.Identifiers.InterfaceName("IFoo", 2, 11), - Tokens.Puncuation.CurlyBrace.Open(2, 31), - Tokens.Puncuation.CurlyBrace.Close(2, 33)]); + Tokens.Keywords.Interface, + Tokens.Identifiers.InterfaceName("IFoo"), + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Puncuation.CurlyBrace.Close]); }); it("generic interface with constraints", () => { @@ -86,14 +86,14 @@ interface IFoo where T1 : T2 { } let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Interface(2, 1), - Tokens.Identifiers.InterfaceName("IFoo", 2, 11), - Tokens.Keywords.Where(2, 24), - Tokens.Type("T1", 2, 30), - Tokens.Puncuation.Colon(2, 33), - Tokens.Type("T2", 2, 35), - Tokens.Puncuation.CurlyBrace.Open(2, 38), - Tokens.Puncuation.CurlyBrace.Close(2, 40)]); + Tokens.Keywords.Interface, + Tokens.Identifiers.InterfaceName("IFoo"), + Tokens.Keywords.Where, + Tokens.Type("T1"), + Tokens.Puncuation.Colon, + Tokens.Type("T2"), + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Puncuation.CurlyBrace.Close]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/interpolated-strings.test.syntax.ts b/test/syntaxes/interpolated-strings.test.syntax.ts index 498fef3217..51da3c80ab 100644 --- a/test/syntaxes/interpolated-strings.test.syntax.ts +++ b/test/syntaxes/interpolated-strings.test.syntax.ts @@ -21,28 +21,28 @@ public class Tester let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public(2, 1), - Tokens.Keywords.Class(2, 8), - Tokens.Identifiers.ClassName("Tester", 2, 14), - Tokens.Puncuation.CurlyBrace.Open(3, 1), - - Tokens.Type("string", 4, 5), - Tokens.Identifiers.FieldName("test", 4, 12), - Tokens.Operators.Assignment(4, 17), - Tokens.Puncuation.InterpolatedString.Begin(4, 19), - Tokens.Literals.String("hello ", 4, 21), - 
Tokens.Puncuation.Interpolation.Begin(4, 27), - Tokens.Variables.ReadWrite("one", 4, 28), - Tokens.Puncuation.Interpolation.End(4, 31), - Tokens.Literals.String(" world ", 4, 32), - Tokens.Puncuation.Interpolation.Begin(4, 39), - Tokens.Variables.ReadWrite("two", 4, 40), - Tokens.Puncuation.Interpolation.End(4, 43), - Tokens.Literals.String("!", 4, 44), - Tokens.Puncuation.InterpolatedString.End(4, 45), - Tokens.Puncuation.Semicolon(4, 46), - - Tokens.Puncuation.CurlyBrace.Close(5, 1)]); + Tokens.Keywords.Modifiers.Public, + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("Tester"), + Tokens.Puncuation.CurlyBrace.Open, + + Tokens.Type("string"), + Tokens.Identifiers.FieldName("test"), + Tokens.Operators.Assignment, + Tokens.Puncuation.InterpolatedString.Begin, + Tokens.Literals.String("hello "), + Tokens.Puncuation.Interpolation.Begin, + Tokens.Variables.ReadWrite("one"), + Tokens.Puncuation.Interpolation.End, + Tokens.Literals.String(" world "), + Tokens.Puncuation.Interpolation.Begin, + Tokens.Variables.ReadWrite("two"), + Tokens.Puncuation.Interpolation.End, + Tokens.Literals.String("!"), + Tokens.Puncuation.InterpolatedString.End, + Tokens.Puncuation.Semicolon, + + Tokens.Puncuation.CurlyBrace.Close]); }); it("no interpolations", () => { @@ -56,20 +56,20 @@ public class Tester let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public(2, 1), - Tokens.Keywords.Class(2, 8), - Tokens.Identifiers.ClassName("Tester", 2, 14), - Tokens.Puncuation.CurlyBrace.Open(3, 1), - - Tokens.Type("string", 4, 5), - Tokens.Identifiers.FieldName("test", 4, 12), - Tokens.Operators.Assignment(4, 17), - Tokens.Puncuation.InterpolatedString.Begin(4, 19), - Tokens.Literals.String("hello world!", 4, 21), - Tokens.Puncuation.InterpolatedString.End(4, 33), - Tokens.Puncuation.Semicolon(4, 34), - - Tokens.Puncuation.CurlyBrace.Close(5, 1)]); + Tokens.Keywords.Modifiers.Public, + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("Tester"), + 
Tokens.Puncuation.CurlyBrace.Open, + + Tokens.Type("string"), + Tokens.Identifiers.FieldName("test"), + Tokens.Operators.Assignment, + Tokens.Puncuation.InterpolatedString.Begin, + Tokens.Literals.String("hello world!"), + Tokens.Puncuation.InterpolatedString.End, + Tokens.Puncuation.Semicolon, + + Tokens.Puncuation.CurlyBrace.Close]); }); it("no interpolations due to escaped braces", () => { @@ -83,20 +83,20 @@ public class Tester let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public(2, 1), - Tokens.Keywords.Class(2, 8), - Tokens.Identifiers.ClassName("Tester", 2, 14), - Tokens.Puncuation.CurlyBrace.Open(3, 1), - - Tokens.Type("string", 4, 5), - Tokens.Identifiers.FieldName("test", 4, 12), - Tokens.Operators.Assignment(4, 17), - Tokens.Puncuation.InterpolatedString.Begin(4, 19), - Tokens.Literals.String("hello {{one}} world {{two}}!", 4, 21), - Tokens.Puncuation.InterpolatedString.End(4, 49), - Tokens.Puncuation.Semicolon(4, 50), - - Tokens.Puncuation.CurlyBrace.Close(5, 1)]); + Tokens.Keywords.Modifiers.Public, + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("Tester"), + Tokens.Puncuation.CurlyBrace.Open, + + Tokens.Type("string"), + Tokens.Identifiers.FieldName("test"), + Tokens.Operators.Assignment, + Tokens.Puncuation.InterpolatedString.Begin, + Tokens.Literals.String("hello {{one}} world {{two}}!"), + Tokens.Puncuation.InterpolatedString.End, + Tokens.Puncuation.Semicolon, + + Tokens.Puncuation.CurlyBrace.Close]); }); it("two interpolations with escaped braces", () => { @@ -110,30 +110,30 @@ public class Tester let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public(2, 1), - Tokens.Keywords.Class(2, 8), - Tokens.Identifiers.ClassName("Tester", 2, 14), - Tokens.Puncuation.CurlyBrace.Open(3, 1), - - Tokens.Type("string", 4, 5), - Tokens.Identifiers.FieldName("test", 4, 12), - Tokens.Operators.Assignment(4, 17), - Tokens.Puncuation.InterpolatedString.Begin(4, 19), - 
Tokens.Literals.String("hello ", 4, 21), - Tokens.Literals.String("{{", 4, 27), - Tokens.Puncuation.Interpolation.Begin(4, 29), - Tokens.Variables.ReadWrite("one", 4, 30), - Tokens.Puncuation.Interpolation.End(4, 33), - Tokens.Literals.String("}} world ", 4, 34), - Tokens.Literals.String("{{", 4, 43), - Tokens.Puncuation.Interpolation.Begin(4, 45), - Tokens.Variables.ReadWrite("two", 4, 46), - Tokens.Puncuation.Interpolation.End(4, 49), - Tokens.Literals.String("}}!", 4, 50), - Tokens.Puncuation.InterpolatedString.End(4, 53), - Tokens.Puncuation.Semicolon(4, 54), - - Tokens.Puncuation.CurlyBrace.Close(5, 1)]); + Tokens.Keywords.Modifiers.Public, + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("Tester"), + Tokens.Puncuation.CurlyBrace.Open, + + Tokens.Type("string"), + Tokens.Identifiers.FieldName("test"), + Tokens.Operators.Assignment, + Tokens.Puncuation.InterpolatedString.Begin, + Tokens.Literals.String("hello "), + Tokens.Literals.String("{{"), + Tokens.Puncuation.Interpolation.Begin, + Tokens.Variables.ReadWrite("one"), + Tokens.Puncuation.Interpolation.End, + Tokens.Literals.String("}} world "), + Tokens.Literals.String("{{"), + Tokens.Puncuation.Interpolation.Begin, + Tokens.Variables.ReadWrite("two"), + Tokens.Puncuation.Interpolation.End, + Tokens.Literals.String("}}!"), + Tokens.Puncuation.InterpolatedString.End, + Tokens.Puncuation.Semicolon, + + Tokens.Puncuation.CurlyBrace.Close]); }); it("no interpolations due to double-escaped braces", () => { @@ -147,20 +147,20 @@ public class Tester let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public(2, 1), - Tokens.Keywords.Class(2, 8), - Tokens.Identifiers.ClassName("Tester", 2, 14), - Tokens.Puncuation.CurlyBrace.Open(3, 1), - - Tokens.Type("string", 4, 5), - Tokens.Identifiers.FieldName("test", 4, 12), - Tokens.Operators.Assignment(4, 17), - Tokens.Puncuation.InterpolatedString.Begin(4, 19), - Tokens.Literals.String("hello {{{{one}}}} world {{{{two}}}}!", 4, 21), - 
Tokens.Puncuation.InterpolatedString.End(4, 57), - Tokens.Puncuation.Semicolon(4, 58), - - Tokens.Puncuation.CurlyBrace.Close(5, 1)]); + Tokens.Keywords.Modifiers.Public, + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("Tester"), + Tokens.Puncuation.CurlyBrace.Open, + + Tokens.Type("string"), + Tokens.Identifiers.FieldName("test"), + Tokens.Operators.Assignment, + Tokens.Puncuation.InterpolatedString.Begin, + Tokens.Literals.String("hello {{{{one}}}} world {{{{two}}}}!"), + Tokens.Puncuation.InterpolatedString.End, + Tokens.Puncuation.Semicolon, + + Tokens.Puncuation.CurlyBrace.Close]); }); it("break across two lines (non-verbatim)", () => { @@ -175,22 +175,22 @@ world!"; let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public(2, 1), - Tokens.Keywords.Class(2, 8), - Tokens.Identifiers.ClassName("Tester", 2, 14), - Tokens.Puncuation.CurlyBrace.Open(3, 1), + Tokens.Keywords.Modifiers.Public, + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("Tester"), + Tokens.Puncuation.CurlyBrace.Open, - Tokens.Type("string", 4, 5), - Tokens.Identifiers.FieldName("test", 4, 12), - Tokens.Operators.Assignment(4, 17), - Tokens.Puncuation.InterpolatedString.Begin(4, 19), - Tokens.Literals.String("hell", 4, 21), + Tokens.Type("string"), + Tokens.Identifiers.FieldName("test"), + Tokens.Operators.Assignment, + Tokens.Puncuation.InterpolatedString.Begin, + Tokens.Literals.String("hell"), // Note: Because the string ended prematurely, the rest of this line and the contents of the next are junk. 
- Tokens.IllegalNewLine("o", 4, 25), - Tokens.Variables.ReadWrite("world", 5, 1), - Tokens.Puncuation.String.Begin(5, 7), - Tokens.IllegalNewLine(";", 5, 8)]); + Tokens.IllegalNewLine("o"), + Tokens.Variables.ReadWrite("world"), + Tokens.Puncuation.String.Begin, + Tokens.IllegalNewLine(";")]); }); it("verbatim with two interpolations", () => { @@ -204,28 +204,28 @@ public class Tester let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public(2, 1), - Tokens.Keywords.Class(2, 8), - Tokens.Identifiers.ClassName("Tester", 2, 14), - Tokens.Puncuation.CurlyBrace.Open(3, 1), - - Tokens.Type("string", 4, 5), - Tokens.Identifiers.FieldName("test", 4, 12), - Tokens.Operators.Assignment(4, 17), - Tokens.Puncuation.InterpolatedString.VerbatimBegin(4, 19), - Tokens.Literals.String("hello ", 4, 22), - Tokens.Puncuation.Interpolation.Begin(4, 28), - Tokens.Variables.ReadWrite("one", 4, 29), - Tokens.Puncuation.Interpolation.End(4, 32), - Tokens.Literals.String(" world ", 4, 33), - Tokens.Puncuation.Interpolation.Begin(4, 40), - Tokens.Variables.ReadWrite("two", 4, 41), - Tokens.Puncuation.Interpolation.End(4, 44), - Tokens.Literals.String("!", 4, 45), - Tokens.Puncuation.InterpolatedString.End(4, 46), - Tokens.Puncuation.Semicolon(4, 47), - - Tokens.Puncuation.CurlyBrace.Close(5, 1)]); + Tokens.Keywords.Modifiers.Public, + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("Tester"), + Tokens.Puncuation.CurlyBrace.Open, + + Tokens.Type("string"), + Tokens.Identifiers.FieldName("test"), + Tokens.Operators.Assignment, + Tokens.Puncuation.InterpolatedString.VerbatimBegin, + Tokens.Literals.String("hello "), + Tokens.Puncuation.Interpolation.Begin, + Tokens.Variables.ReadWrite("one"), + Tokens.Puncuation.Interpolation.End, + Tokens.Literals.String(" world "), + Tokens.Puncuation.Interpolation.Begin, + Tokens.Variables.ReadWrite("two"), + Tokens.Puncuation.Interpolation.End, + Tokens.Literals.String("!"), + 
Tokens.Puncuation.InterpolatedString.End, + Tokens.Puncuation.Semicolon, + + Tokens.Puncuation.CurlyBrace.Close]); }); it("break across two lines with two interpolations (verbatim)", () => { @@ -240,28 +240,28 @@ public class Tester let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public(2, 1), - Tokens.Keywords.Class(2, 8), - Tokens.Identifiers.ClassName("Tester", 2, 14), - Tokens.Puncuation.CurlyBrace.Open(3, 1), - - Tokens.Type("string", 4, 5), - Tokens.Identifiers.FieldName("test", 4, 12), - Tokens.Operators.Assignment(4, 17), - Tokens.Puncuation.InterpolatedString.VerbatimBegin(4, 19), - Tokens.Literals.String("hello ", 4, 22), - Tokens.Puncuation.Interpolation.Begin(4, 28), - Tokens.Variables.ReadWrite("one", 4, 29), - Tokens.Puncuation.Interpolation.End(4, 32), - Tokens.Literals.String(" world ", 5, 1), - Tokens.Puncuation.Interpolation.Begin(5, 11), - Tokens.Variables.ReadWrite("two", 5, 12), - Tokens.Puncuation.Interpolation.End(5, 15), - Tokens.Literals.String("!", 5, 16), - Tokens.Puncuation.InterpolatedString.End(5, 17), - Tokens.Puncuation.Semicolon(5, 18), - - Tokens.Puncuation.CurlyBrace.Close(6, 1)]); + Tokens.Keywords.Modifiers.Public, + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("Tester"), + Tokens.Puncuation.CurlyBrace.Open, + + Tokens.Type("string"), + Tokens.Identifiers.FieldName("test"), + Tokens.Operators.Assignment, + Tokens.Puncuation.InterpolatedString.VerbatimBegin, + Tokens.Literals.String("hello "), + Tokens.Puncuation.Interpolation.Begin, + Tokens.Variables.ReadWrite("one"), + Tokens.Puncuation.Interpolation.End, + Tokens.Literals.String(" world "), + Tokens.Puncuation.Interpolation.Begin, + Tokens.Variables.ReadWrite("two"), + Tokens.Puncuation.Interpolation.End, + Tokens.Literals.String("!"), + Tokens.Puncuation.InterpolatedString.End, + Tokens.Puncuation.Semicolon, + + Tokens.Puncuation.CurlyBrace.Close]); }); it("break across two lines with no interpolations (verbatim)", () => { @@ 
-276,21 +276,21 @@ public class Tester let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public(2, 1), - Tokens.Keywords.Class(2, 8), - Tokens.Identifiers.ClassName("Tester", 2, 14), - Tokens.Puncuation.CurlyBrace.Open(3, 1), - - Tokens.Type("string", 4, 5), - Tokens.Identifiers.FieldName("test", 4, 12), - Tokens.Operators.Assignment(4, 17), - Tokens.Puncuation.InterpolatedString.VerbatimBegin(4, 19), - Tokens.Literals.String("hello", 4, 22), - Tokens.Literals.String(" world!", 5, 1), - Tokens.Puncuation.InterpolatedString.End(5, 11), - Tokens.Puncuation.Semicolon(5, 12), - - Tokens.Puncuation.CurlyBrace.Close(6, 1)]); + Tokens.Keywords.Modifiers.Public, + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("Tester"), + Tokens.Puncuation.CurlyBrace.Open, + + Tokens.Type("string"), + Tokens.Identifiers.FieldName("test"), + Tokens.Operators.Assignment, + Tokens.Puncuation.InterpolatedString.VerbatimBegin, + Tokens.Literals.String("hello"), + Tokens.Literals.String(" world!"), + Tokens.Puncuation.InterpolatedString.End, + Tokens.Puncuation.Semicolon, + + Tokens.Puncuation.CurlyBrace.Close]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/methods.test.syntax.ts b/test/syntaxes/methods.test.syntax.ts index 200f23afc5..9059c8e1c3 100644 --- a/test/syntaxes/methods.test.syntax.ts +++ b/test/syntaxes/methods.test.syntax.ts @@ -20,18 +20,18 @@ class Tester let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Class(2, 1), - Tokens.Identifiers.ClassName("Tester", 2, 7), - Tokens.Puncuation.CurlyBrace.Open(3, 1), - - Tokens.Type("void", 4, 5), - Tokens.Identifiers.MethodName("Foo", 4, 10), - Tokens.Puncuation.Parenthesis.Open(4, 13), - Tokens.Puncuation.Parenthesis.Close(4, 14), - Tokens.Puncuation.CurlyBrace.Open(4, 16), - Tokens.Puncuation.CurlyBrace.Close(4, 18), - - Tokens.Puncuation.CurlyBrace.Close(5, 1)]); + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("Tester"), + 
Tokens.Puncuation.CurlyBrace.Open, + + Tokens.Type("void"), + Tokens.Identifiers.MethodName("Foo"), + Tokens.Puncuation.Parenthesis.Open, + Tokens.Puncuation.Parenthesis.Close, + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Puncuation.CurlyBrace.Close, + + Tokens.Puncuation.CurlyBrace.Close]); }); it("declaration with two parameters", () => { @@ -47,28 +47,28 @@ class Tester let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Class(2, 1), - Tokens.Identifiers.ClassName("Tester", 2, 7), - Tokens.Puncuation.CurlyBrace.Open(3, 1), - - Tokens.Type("int", 4, 5), - Tokens.Identifiers.MethodName("Add", 4, 9), - Tokens.Puncuation.Parenthesis.Open(4, 12), - Tokens.Type("int", 4, 13), - Tokens.Variables.Parameter("x", 4, 17), - Tokens.Puncuation.Comma(4, 18), - Tokens.Type("int", 4, 20), - Tokens.Variables.Parameter("y", 4, 24), - Tokens.Puncuation.Parenthesis.Close(4, 25), - Tokens.Puncuation.CurlyBrace.Open(5, 5), - Tokens.Keywords.Return(6, 9), - Tokens.Variables.ReadWrite("x", 6, 16), - Tokens.Operators.Arithmetic.Addition(6, 18), - Tokens.Variables.ReadWrite("y", 6, 20), - Tokens.Puncuation.Semicolon(6, 21), - Tokens.Puncuation.CurlyBrace.Close(7, 5), - - Tokens.Puncuation.CurlyBrace.Close(8, 1)]); + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("Tester"), + Tokens.Puncuation.CurlyBrace.Open, + + Tokens.Type("int"), + Tokens.Identifiers.MethodName("Add"), + Tokens.Puncuation.Parenthesis.Open, + Tokens.Type("int"), + Tokens.Variables.Parameter("x"), + Tokens.Puncuation.Comma, + Tokens.Type("int"), + Tokens.Variables.Parameter("y"), + Tokens.Puncuation.Parenthesis.Close, + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Keywords.Return, + Tokens.Variables.ReadWrite("x"), + Tokens.Operators.Arithmetic.Addition, + Tokens.Variables.ReadWrite("y"), + Tokens.Puncuation.Semicolon, + Tokens.Puncuation.CurlyBrace.Close, + + Tokens.Puncuation.CurlyBrace.Close]); }); it("expression body", () => { @@ -81,26 +81,26 @@ class Tester let tokens = 
tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Class(2, 1), - Tokens.Identifiers.ClassName("Tester", 2, 7), - Tokens.Puncuation.CurlyBrace.Open(3, 1), - - Tokens.Type("int", 4, 5), - Tokens.Identifiers.MethodName("Add", 4, 9), - Tokens.Puncuation.Parenthesis.Open(4, 12), - Tokens.Type("int", 4, 13), - Tokens.Variables.Parameter("x", 4, 17), - Tokens.Puncuation.Comma(4, 18), - Tokens.Type("int", 4, 20), - Tokens.Variables.Parameter("y", 4, 24), - Tokens.Puncuation.Parenthesis.Close(4, 25), - Tokens.Operators.Arrow(4, 27), - Tokens.Variables.ReadWrite("x", 4, 30), - Tokens.Operators.Arithmetic.Addition(4, 32), - Tokens.Variables.ReadWrite("y", 4, 34), - Tokens.Puncuation.Semicolon(4, 35), - - Tokens.Puncuation.CurlyBrace.Close(5, 1)]); + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("Tester"), + Tokens.Puncuation.CurlyBrace.Open, + + Tokens.Type("int"), + Tokens.Identifiers.MethodName("Add"), + Tokens.Puncuation.Parenthesis.Open, + Tokens.Type("int"), + Tokens.Variables.Parameter("x"), + Tokens.Puncuation.Comma, + Tokens.Type("int"), + Tokens.Variables.Parameter("y"), + Tokens.Puncuation.Parenthesis.Close, + Tokens.Operators.Arrow, + Tokens.Variables.ReadWrite("x"), + Tokens.Operators.Arithmetic.Addition, + Tokens.Variables.ReadWrite("y"), + Tokens.Puncuation.Semicolon, + + Tokens.Puncuation.CurlyBrace.Close]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/namespaces.test.syntax.ts b/test/syntaxes/namespaces.test.syntax.ts index 9d47090362..f6b8ceb3a6 100644 --- a/test/syntaxes/namespaces.test.syntax.ts +++ b/test/syntaxes/namespaces.test.syntax.ts @@ -19,10 +19,10 @@ namespace TestNamespace let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Namespace(2, 1), - Tokens.Identifiers.NamespaceName("TestNamespace", 2, 11), - Tokens.Puncuation.CurlyBrace.Open(3, 1), - Tokens.Puncuation.CurlyBrace.Close(4, 1)]); + Tokens.Keywords.Namespace, + Tokens.Identifiers.NamespaceName("TestNamespace"), + 
Tokens.Puncuation.CurlyBrace.Open, + Tokens.Puncuation.CurlyBrace.Close]); }); it("has a namespace keyword and a dotted name", () => { @@ -34,12 +34,12 @@ namespace Test.Namespace let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Namespace(2, 1), - Tokens.Identifiers.NamespaceName("Test", 2, 11), - Tokens.Puncuation.Accessor(2, 15), - Tokens.Identifiers.NamespaceName("Namespace", 2, 16), - Tokens.Puncuation.CurlyBrace.Open(3, 1), - Tokens.Puncuation.CurlyBrace.Close(4, 1)]); + Tokens.Keywords.Namespace, + Tokens.Identifiers.NamespaceName("Test"), + Tokens.Puncuation.Accessor, + Tokens.Identifiers.NamespaceName("Namespace"), + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Puncuation.CurlyBrace.Close]); }); it("can be nested", () => { @@ -54,16 +54,16 @@ namespace TestNamespace let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Namespace(2, 1), - Tokens.Identifiers.NamespaceName("TestNamespace", 2, 11), - Tokens.Puncuation.CurlyBrace.Open(3, 1), + Tokens.Keywords.Namespace, + Tokens.Identifiers.NamespaceName("TestNamespace"), + Tokens.Puncuation.CurlyBrace.Open, - Tokens.Keywords.Namespace(4, 5), - Tokens.Identifiers.NamespaceName("NestedNamespace", 4, 15), - Tokens.Puncuation.CurlyBrace.Open(4, 31), + Tokens.Keywords.Namespace, + Tokens.Identifiers.NamespaceName("NestedNamespace"), + Tokens.Puncuation.CurlyBrace.Open, - Tokens.Puncuation.CurlyBrace.Close(6, 5), - Tokens.Puncuation.CurlyBrace.Close(7, 1)]); + Tokens.Puncuation.CurlyBrace.Close, + Tokens.Puncuation.CurlyBrace.Close]); }); it("can contain using statements", () => { @@ -86,52 +86,52 @@ namespace TestNamespace let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Using(2, 1), - Tokens.Identifiers.NamespaceName("UsingOne", 2, 7), - Tokens.Puncuation.Semicolon(2, 15), - - Tokens.Keywords.Using(3, 1), - Tokens.Identifiers.AliasName("one", 3, 7), - Tokens.Operators.Assignment(3, 11), - Tokens.Type("UsingOne", 3, 13), - 
Tokens.Puncuation.Accessor(3, 21), - Tokens.Type("Something", 3, 22), - Tokens.Puncuation.Semicolon(3, 31), - - Tokens.Keywords.Namespace(5, 1), - Tokens.Identifiers.NamespaceName("TestNamespace", 5, 11), - Tokens.Puncuation.CurlyBrace.Open(6, 1), - - Tokens.Keywords.Using(7, 5), - Tokens.Identifiers.NamespaceName("UsingTwo", 7, 11), - Tokens.Puncuation.Semicolon(7, 19), - - Tokens.Keywords.Using(8, 5), - Tokens.Identifiers.AliasName("two", 8, 11), - Tokens.Operators.Assignment(8, 15), - Tokens.Type("UsingTwo", 8, 17), - Tokens.Puncuation.Accessor(8, 25), - Tokens.Type("Something", 8, 26), - Tokens.Puncuation.Semicolon(8, 35), - - Tokens.Keywords.Namespace(10, 5), - Tokens.Identifiers.NamespaceName("NestedNamespace", 10, 15), - Tokens.Puncuation.CurlyBrace.Open(11, 5), - - Tokens.Keywords.Using(12, 9), - Tokens.Identifiers.NamespaceName("UsingThree", 12, 15), - Tokens.Puncuation.Semicolon(12, 25), - - Tokens.Keywords.Using(13, 9), - Tokens.Identifiers.AliasName("three", 13, 15), - Tokens.Operators.Assignment(13, 21), - Tokens.Type("UsingThree", 13, 23), - Tokens.Puncuation.Accessor(13, 33), - Tokens.Type("Something", 13, 34), - Tokens.Puncuation.Semicolon(13, 43), - - Tokens.Puncuation.CurlyBrace.Close(14, 5), - Tokens.Puncuation.CurlyBrace.Close(15, 1)]); + Tokens.Keywords.Using, + Tokens.Identifiers.NamespaceName("UsingOne"), + Tokens.Puncuation.Semicolon, + + Tokens.Keywords.Using, + Tokens.Identifiers.AliasName("one"), + Tokens.Operators.Assignment, + Tokens.Type("UsingOne"), + Tokens.Puncuation.Accessor, + Tokens.Type("Something"), + Tokens.Puncuation.Semicolon, + + Tokens.Keywords.Namespace, + Tokens.Identifiers.NamespaceName("TestNamespace"), + Tokens.Puncuation.CurlyBrace.Open, + + Tokens.Keywords.Using, + Tokens.Identifiers.NamespaceName("UsingTwo"), + Tokens.Puncuation.Semicolon, + + Tokens.Keywords.Using, + Tokens.Identifiers.AliasName("two"), + Tokens.Operators.Assignment, + Tokens.Type("UsingTwo"), + Tokens.Puncuation.Accessor, + 
Tokens.Type("Something"), + Tokens.Puncuation.Semicolon, + + Tokens.Keywords.Namespace, + Tokens.Identifiers.NamespaceName("NestedNamespace"), + Tokens.Puncuation.CurlyBrace.Open, + + Tokens.Keywords.Using, + Tokens.Identifiers.NamespaceName("UsingThree"), + Tokens.Puncuation.Semicolon, + + Tokens.Keywords.Using, + Tokens.Identifiers.AliasName("three"), + Tokens.Operators.Assignment, + Tokens.Type("UsingThree"), + Tokens.Puncuation.Accessor, + Tokens.Type("Something"), + Tokens.Puncuation.Semicolon, + + Tokens.Puncuation.CurlyBrace.Close, + Tokens.Puncuation.CurlyBrace.Close]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/numeric-literals.test.syntax.ts b/test/syntaxes/numeric-literals.test.syntax.ts index 5e9056c890..f7f3137ba8 100644 --- a/test/syntaxes/numeric-literals.test.syntax.ts +++ b/test/syntaxes/numeric-literals.test.syntax.ts @@ -20,17 +20,17 @@ class C { let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Class(2, 1), - Tokens.Identifiers.ClassName("C", 2, 7), - Tokens.Puncuation.CurlyBrace.Open(2, 9), + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("C"), + Tokens.Puncuation.CurlyBrace.Open, - Tokens.Type("int", 3, 5), - Tokens.Identifiers.FieldName("x", 3, 9), - Tokens.Operators.Assignment(3, 11), - Tokens.Literals.Numeric.Decimal("0", 3, 13), - Tokens.Puncuation.Semicolon(3, 14), + Tokens.Type("int"), + Tokens.Identifiers.FieldName("x"), + Tokens.Operators.Assignment, + Tokens.Literals.Numeric.Decimal("0"), + Tokens.Puncuation.Semicolon, - Tokens.Puncuation.CurlyBrace.Close(4, 1)]); + Tokens.Puncuation.CurlyBrace.Close]); }); it("hexadecimal zero", () => { @@ -43,17 +43,17 @@ class C { let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Class(2, 1), - Tokens.Identifiers.ClassName("C", 2, 7), - Tokens.Puncuation.CurlyBrace.Open(2, 9), + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("C"), + Tokens.Puncuation.CurlyBrace.Open, - Tokens.Type("int", 3, 5), - 
Tokens.Identifiers.FieldName("x", 3, 9), - Tokens.Operators.Assignment(3, 11), - Tokens.Literals.Numeric.Hexadecimal("0x0", 3, 13), - Tokens.Puncuation.Semicolon(3, 16), + Tokens.Type("int"), + Tokens.Identifiers.FieldName("x"), + Tokens.Operators.Assignment, + Tokens.Literals.Numeric.Hexadecimal("0x0"), + Tokens.Puncuation.Semicolon, - Tokens.Puncuation.CurlyBrace.Close(4, 1)]); + Tokens.Puncuation.CurlyBrace.Close]); }); it("binary zero", () => { @@ -66,17 +66,17 @@ class C { let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Class(2, 1), - Tokens.Identifiers.ClassName("C", 2, 7), - Tokens.Puncuation.CurlyBrace.Open(2, 9), + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("C"), + Tokens.Puncuation.CurlyBrace.Open, - Tokens.Type("int", 3, 5), - Tokens.Identifiers.FieldName("x", 3, 9), - Tokens.Operators.Assignment(3, 11), - Tokens.Literals.Numeric.Binary("0b0", 3, 13), - Tokens.Puncuation.Semicolon(3, 16), + Tokens.Type("int"), + Tokens.Identifiers.FieldName("x"), + Tokens.Operators.Assignment, + Tokens.Literals.Numeric.Binary("0b0"), + Tokens.Puncuation.Semicolon, - Tokens.Puncuation.CurlyBrace.Close(4, 1)]); + Tokens.Puncuation.CurlyBrace.Close]); }); it("floating-point zero", () => { @@ -89,17 +89,17 @@ class C { let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Class(2, 1), - Tokens.Identifiers.ClassName("C", 2, 7), - Tokens.Puncuation.CurlyBrace.Open(2, 9), + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("C"), + Tokens.Puncuation.CurlyBrace.Open, - Tokens.Type("float", 3, 5), - Tokens.Identifiers.FieldName("x", 3, 11), - Tokens.Operators.Assignment(3, 13), - Tokens.Literals.Numeric.Decimal("0.0", 3, 15), - Tokens.Puncuation.Semicolon(3, 18), + Tokens.Type("float"), + Tokens.Identifiers.FieldName("x"), + Tokens.Operators.Assignment, + Tokens.Literals.Numeric.Decimal("0.0"), + Tokens.Puncuation.Semicolon, - Tokens.Puncuation.CurlyBrace.Close(4, 1)]); + Tokens.Puncuation.CurlyBrace.Close]); }); }); 
}); \ No newline at end of file diff --git a/test/syntaxes/properties.test.syntax.ts b/test/syntaxes/properties.test.syntax.ts index 10b65435e7..bfac4472ff 100644 --- a/test/syntaxes/properties.test.syntax.ts +++ b/test/syntaxes/properties.test.syntax.ts @@ -24,30 +24,30 @@ class Tester let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Class(2, 1), - Tokens.Identifiers.ClassName("Tester", 2, 7), - Tokens.Puncuation.CurlyBrace.Open(3, 1), - - Tokens.Keywords.Modifiers.Public(4, 5), - Tokens.Type("IBooom", 4, 12), - Tokens.Identifiers.PropertyName("Property", 4, 19), - Tokens.Puncuation.CurlyBrace.Open(5, 5), - Tokens.Keywords.Get(6, 9), - Tokens.Puncuation.CurlyBrace.Open(6, 13), - Tokens.Keywords.Return(6, 15), - Tokens.Literals.Null(6, 22), - Tokens.Puncuation.Semicolon(6, 26), - Tokens.Puncuation.CurlyBrace.Close(6, 28), - Tokens.Keywords.Set(7, 9), - Tokens.Puncuation.CurlyBrace.Open(7, 13), - Tokens.Variables.ReadWrite("something", 7, 15), - Tokens.Operators.Assignment(7, 25), - Tokens.Variables.ReadWrite("value", 7, 27), - Tokens.Puncuation.Semicolon(7, 32), - Tokens.Puncuation.CurlyBrace.Close(7, 34), - Tokens.Puncuation.CurlyBrace.Close(8, 5), - - Tokens.Puncuation.CurlyBrace.Close(9, 1)]); + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("Tester"), + Tokens.Puncuation.CurlyBrace.Open, + + Tokens.Keywords.Modifiers.Public, + Tokens.Type("IBooom"), + Tokens.Identifiers.PropertyName("Property"), + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Keywords.Get, + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Keywords.Return, + Tokens.Literals.Null, + Tokens.Puncuation.Semicolon, + Tokens.Puncuation.CurlyBrace.Close, + Tokens.Keywords.Set, + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Variables.ReadWrite("something"), + Tokens.Operators.Assignment, + Tokens.Variables.ReadWrite("value"), + Tokens.Puncuation.Semicolon, + Tokens.Puncuation.CurlyBrace.Close, + Tokens.Puncuation.CurlyBrace.Close, + + Tokens.Puncuation.CurlyBrace.Close]); }); 
it("declaration single line", () => { @@ -60,31 +60,31 @@ class Tester let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Class(2, 1), - Tokens.Identifiers.ClassName("Tester", 2, 7), - Tokens.Puncuation.CurlyBrace.Open(3, 1), - - Tokens.Keywords.Modifiers.Public(4, 5), - Tokens.Type("IBooom", 4, 12), - Tokens.Identifiers.PropertyName("Property", 4, 19), - Tokens.Puncuation.CurlyBrace.Open(4, 28), - Tokens.Keywords.Get(4, 30), - Tokens.Puncuation.CurlyBrace.Open(4, 34), - Tokens.Keywords.Return(4, 36), - Tokens.Literals.Null(4, 43), - Tokens.Puncuation.Semicolon(4, 47), - Tokens.Puncuation.CurlyBrace.Close(4, 49), - Tokens.Keywords.Modifiers.Private(4, 51), - Tokens.Keywords.Set(4, 59), - Tokens.Puncuation.CurlyBrace.Open(4, 63), - Tokens.Variables.ReadWrite("something", 4, 65), - Tokens.Operators.Assignment(4, 75), - Tokens.Variables.ReadWrite("value", 4, 77), - Tokens.Puncuation.Semicolon(4, 82), - Tokens.Puncuation.CurlyBrace.Close(4, 84), - Tokens.Puncuation.CurlyBrace.Close(4, 86), - - Tokens.Puncuation.CurlyBrace.Close(5, 1)]); + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("Tester"), + Tokens.Puncuation.CurlyBrace.Open, + + Tokens.Keywords.Modifiers.Public, + Tokens.Type("IBooom"), + Tokens.Identifiers.PropertyName("Property"), + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Keywords.Get, + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Keywords.Return, + Tokens.Literals.Null, + Tokens.Puncuation.Semicolon, + Tokens.Puncuation.CurlyBrace.Close, + Tokens.Keywords.Modifiers.Private, + Tokens.Keywords.Set, + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Variables.ReadWrite("something"), + Tokens.Operators.Assignment, + Tokens.Variables.ReadWrite("value"), + Tokens.Puncuation.Semicolon, + Tokens.Puncuation.CurlyBrace.Close, + Tokens.Puncuation.CurlyBrace.Close, + + Tokens.Puncuation.CurlyBrace.Close]); }); it("declaration without modifiers", () => { @@ -97,20 +97,20 @@ class Tester let tokens = tokenize(input); tokens.should.deep.equal([ 
- Tokens.Keywords.Class(2, 1), - Tokens.Identifiers.ClassName("Tester", 2, 7), - Tokens.Puncuation.CurlyBrace.Open(3, 1), - - Tokens.Type("IBooom", 4, 5), - Tokens.Identifiers.PropertyName("Property", 4, 12), - Tokens.Puncuation.CurlyBrace.Open(4, 21), - Tokens.Keywords.Get(4, 22), - Tokens.Puncuation.Semicolon(4, 25), - Tokens.Keywords.Set(4, 27), - Tokens.Puncuation.Semicolon(4, 30), - Tokens.Puncuation.CurlyBrace.Close(4, 31), - - Tokens.Puncuation.CurlyBrace.Close(5, 1)]); + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("Tester"), + Tokens.Puncuation.CurlyBrace.Open, + + Tokens.Type("IBooom"), + Tokens.Identifiers.PropertyName("Property"), + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Keywords.Get, + Tokens.Puncuation.Semicolon, + Tokens.Keywords.Set, + Tokens.Puncuation.Semicolon, + Tokens.Puncuation.CurlyBrace.Close, + + Tokens.Puncuation.CurlyBrace.Close]); }); it("auto-property single line", function () { @@ -123,21 +123,21 @@ class Tester let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Class(2, 1), - Tokens.Identifiers.ClassName("Tester", 2, 7), - Tokens.Puncuation.CurlyBrace.Open(3, 1), - - Tokens.Keywords.Modifiers.Public(4, 5), - Tokens.Type("IBooom", 4, 12), - Tokens.Identifiers.PropertyName("Property", 4, 19), - Tokens.Puncuation.CurlyBrace.Open(4, 28), - Tokens.Keywords.Get(4, 30), - Tokens.Puncuation.Semicolon(4, 33), - Tokens.Keywords.Set(4, 35), - Tokens.Puncuation.Semicolon(4, 38), - Tokens.Puncuation.CurlyBrace.Close(4, 40), - - Tokens.Puncuation.CurlyBrace.Close(5, 1)]); + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("Tester"), + Tokens.Puncuation.CurlyBrace.Open, + + Tokens.Keywords.Modifiers.Public, + Tokens.Type("IBooom"), + Tokens.Identifiers.PropertyName("Property"), + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Keywords.Get, + Tokens.Puncuation.Semicolon, + Tokens.Keywords.Set, + Tokens.Puncuation.Semicolon, + Tokens.Puncuation.CurlyBrace.Close, + + Tokens.Puncuation.CurlyBrace.Close]); }); 
it("auto-property single line (protected internal)", function () { @@ -150,22 +150,22 @@ class Tester let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Class(2, 1), - Tokens.Identifiers.ClassName("Tester", 2, 7), - Tokens.Puncuation.CurlyBrace.Open(3, 1), - - Tokens.Keywords.Modifiers.Protected(4, 5), - Tokens.Keywords.Modifiers.Internal(4, 15), - Tokens.Type("IBooom", 4, 24), - Tokens.Identifiers.PropertyName("Property", 4, 31), - Tokens.Puncuation.CurlyBrace.Open(4, 40), - Tokens.Keywords.Get(4, 42), - Tokens.Puncuation.Semicolon(4, 45), - Tokens.Keywords.Set(4, 47), - Tokens.Puncuation.Semicolon(4, 50), - Tokens.Puncuation.CurlyBrace.Close(4, 52), - - Tokens.Puncuation.CurlyBrace.Close(5, 1)]); + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("Tester"), + Tokens.Puncuation.CurlyBrace.Open, + + Tokens.Keywords.Modifiers.Protected, + Tokens.Keywords.Modifiers.Internal, + Tokens.Type("IBooom"), + Tokens.Identifiers.PropertyName("Property"), + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Keywords.Get, + Tokens.Puncuation.Semicolon, + Tokens.Keywords.Set, + Tokens.Puncuation.Semicolon, + Tokens.Puncuation.CurlyBrace.Close, + + Tokens.Puncuation.CurlyBrace.Close]); }); it("auto-property", () => { @@ -182,21 +182,21 @@ class Tester let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Class(2, 1), - Tokens.Identifiers.ClassName("Tester", 2, 7), - Tokens.Puncuation.CurlyBrace.Open(3, 1), - - Tokens.Keywords.Modifiers.Public(4, 5), - Tokens.Type("IBooom", 4, 12), - Tokens.Identifiers.PropertyName("Property", 4, 19), - Tokens.Puncuation.CurlyBrace.Open(5, 5), - Tokens.Keywords.Get(6, 9), - Tokens.Puncuation.Semicolon(6, 12), - Tokens.Keywords.Set(7, 9), - Tokens.Puncuation.Semicolon(7, 12), - Tokens.Puncuation.CurlyBrace.Close(8, 5), - - Tokens.Puncuation.CurlyBrace.Close(9, 1)]); + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("Tester"), + Tokens.Puncuation.CurlyBrace.Open, + + Tokens.Keywords.Modifiers.Public, 
+ Tokens.Type("IBooom"), + Tokens.Identifiers.PropertyName("Property"), + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Keywords.Get, + Tokens.Puncuation.Semicolon, + Tokens.Keywords.Set, + Tokens.Puncuation.Semicolon, + Tokens.Puncuation.CurlyBrace.Close, + + Tokens.Puncuation.CurlyBrace.Close]); }); it("generic auto-property", () => { @@ -209,31 +209,31 @@ class Tester let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Class(2, 1), - Tokens.Identifiers.ClassName("Tester", 2, 7), - Tokens.Puncuation.CurlyBrace.Open(3, 1), - - Tokens.Keywords.Modifiers.Public(4, 5), - Tokens.Type("Dictionary", 4, 12), - Tokens.Puncuation.TypeParameters.Begin(4, 22), - Tokens.Type("string", 4, 23), - Tokens.Puncuation.Comma(4, 29), - Tokens.Type("List", 4, 31), - Tokens.Puncuation.TypeParameters.Begin(4, 35), - Tokens.Type("T", 4, 36), - Tokens.Puncuation.TypeParameters.End(4, 37), - Tokens.Puncuation.SquareBracket.Open(4, 38), - Tokens.Puncuation.SquareBracket.Close(4, 39), - Tokens.Puncuation.TypeParameters.End(4, 40), - Tokens.Identifiers.PropertyName("Property", 4, 42), - Tokens.Puncuation.CurlyBrace.Open(4, 51), - Tokens.Keywords.Get(4, 53), - Tokens.Puncuation.Semicolon(4, 56), - Tokens.Keywords.Set(4, 58), - Tokens.Puncuation.Semicolon(4, 61), - Tokens.Puncuation.CurlyBrace.Close(4, 63), - - Tokens.Puncuation.CurlyBrace.Close(5, 1)]); + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("Tester"), + Tokens.Puncuation.CurlyBrace.Open, + + Tokens.Keywords.Modifiers.Public, + Tokens.Type("Dictionary"), + Tokens.Puncuation.TypeParameters.Begin, + Tokens.Type("string"), + Tokens.Puncuation.Comma, + Tokens.Type("List"), + Tokens.Puncuation.TypeParameters.Begin, + Tokens.Type("T"), + Tokens.Puncuation.TypeParameters.End, + Tokens.Puncuation.SquareBracket.Open, + Tokens.Puncuation.SquareBracket.Close, + Tokens.Puncuation.TypeParameters.End, + Tokens.Identifiers.PropertyName("Property"), + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Keywords.Get, + 
Tokens.Puncuation.Semicolon, + Tokens.Keywords.Set, + Tokens.Puncuation.Semicolon, + Tokens.Puncuation.CurlyBrace.Close, + + Tokens.Puncuation.CurlyBrace.Close]); }); it("auto-property initializer", () => { @@ -247,45 +247,45 @@ class Tester let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Class(2, 1), - Tokens.Identifiers.ClassName("Tester", 2, 7), - Tokens.Puncuation.CurlyBrace.Open(3, 1), - - Tokens.Keywords.Modifiers.Public(4, 5), - Tokens.Type("Dictionary", 4, 12), - Tokens.Puncuation.TypeParameters.Begin(4, 22), - Tokens.Type("string", 4, 23), - Tokens.Puncuation.Comma(4, 29), - Tokens.Type("List", 4, 31), - Tokens.Puncuation.TypeParameters.Begin(4, 35), - Tokens.Type("T", 4, 36), - Tokens.Puncuation.TypeParameters.End(4, 37), - Tokens.Puncuation.SquareBracket.Open(4, 38), - Tokens.Puncuation.SquareBracket.Close(4, 39), - Tokens.Puncuation.TypeParameters.End(4, 40), - Tokens.Identifiers.PropertyName("Property", 4, 42), - Tokens.Puncuation.CurlyBrace.Open(4, 51), - Tokens.Keywords.Get(4, 53), - Tokens.Puncuation.Semicolon(4, 56), - Tokens.Puncuation.CurlyBrace.Close(4, 58), - Tokens.Operators.Assignment(4, 60), - Tokens.Keywords.New(4, 62), - Tokens.Type("Dictionary", 4, 66), - Tokens.Puncuation.TypeParameters.Begin(4, 76), - Tokens.Type("string", 4, 77), - Tokens.Puncuation.Comma(4, 83), - Tokens.Type("List", 4, 85), - Tokens.Puncuation.TypeParameters.Begin(4, 89), - Tokens.Type("T", 4, 90), - Tokens.Puncuation.TypeParameters.End(4, 91), - Tokens.Puncuation.SquareBracket.Open(4, 92), - Tokens.Puncuation.SquareBracket.Close(4, 93), - Tokens.Puncuation.TypeParameters.End(4, 94), - Tokens.Puncuation.Parenthesis.Open(4, 95), - Tokens.Puncuation.Parenthesis.Close(4, 96), - Tokens.Puncuation.Semicolon(4, 97), - - Tokens.Puncuation.CurlyBrace.Close(5, 1)]); + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("Tester"), + Tokens.Puncuation.CurlyBrace.Open, + + Tokens.Keywords.Modifiers.Public, + Tokens.Type("Dictionary"), + 
Tokens.Puncuation.TypeParameters.Begin, + Tokens.Type("string"), + Tokens.Puncuation.Comma, + Tokens.Type("List"), + Tokens.Puncuation.TypeParameters.Begin, + Tokens.Type("T"), + Tokens.Puncuation.TypeParameters.End, + Tokens.Puncuation.SquareBracket.Open, + Tokens.Puncuation.SquareBracket.Close, + Tokens.Puncuation.TypeParameters.End, + Tokens.Identifiers.PropertyName("Property"), + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Keywords.Get, + Tokens.Puncuation.Semicolon, + Tokens.Puncuation.CurlyBrace.Close, + Tokens.Operators.Assignment, + Tokens.Keywords.New, + Tokens.Type("Dictionary"), + Tokens.Puncuation.TypeParameters.Begin, + Tokens.Type("string"), + Tokens.Puncuation.Comma, + Tokens.Type("List"), + Tokens.Puncuation.TypeParameters.Begin, + Tokens.Type("T"), + Tokens.Puncuation.TypeParameters.End, + Tokens.Puncuation.SquareBracket.Open, + Tokens.Puncuation.SquareBracket.Close, + Tokens.Puncuation.TypeParameters.End, + Tokens.Puncuation.Parenthesis.Open, + Tokens.Puncuation.Parenthesis.Close, + Tokens.Puncuation.Semicolon, + + Tokens.Puncuation.CurlyBrace.Close]); }); it("expression body", () => { @@ -300,28 +300,28 @@ public class Tester let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public(2, 1), - Tokens.Keywords.Class(2, 8), - Tokens.Identifiers.ClassName("Tester", 2, 14), - Tokens.Puncuation.CurlyBrace.Open(3, 1), - - Tokens.Keywords.Modifiers.Private(4, 5), - Tokens.Type("string", 4, 13), - Tokens.Identifiers.PropertyName("prop1", 4, 20), - Tokens.Operators.Arrow(4, 26), - Tokens.Puncuation.String.Begin(4, 29), - Tokens.Literals.String("hello", 4, 30), - Tokens.Puncuation.String.End(4, 35), - Tokens.Puncuation.Semicolon(4, 36), - - Tokens.Keywords.Modifiers.Private(5, 5), - Tokens.Type("bool", 5, 13), - Tokens.Identifiers.PropertyName("prop2", 5, 20), - Tokens.Operators.Arrow(5, 26), - Tokens.Literals.Boolean.True(5, 29), - Tokens.Puncuation.Semicolon(5, 33), - - Tokens.Puncuation.CurlyBrace.Close(6, 1)]); + 
Tokens.Keywords.Modifiers.Public, + Tokens.Keywords.Class, + Tokens.Identifiers.ClassName("Tester"), + Tokens.Puncuation.CurlyBrace.Open, + + Tokens.Keywords.Modifiers.Private, + Tokens.Type("string"), + Tokens.Identifiers.PropertyName("prop1"), + Tokens.Operators.Arrow, + Tokens.Puncuation.String.Begin, + Tokens.Literals.String("hello"), + Tokens.Puncuation.String.End, + Tokens.Puncuation.Semicolon, + + Tokens.Keywords.Modifiers.Private, + Tokens.Type("bool"), + Tokens.Identifiers.PropertyName("prop2"), + Tokens.Operators.Arrow, + Tokens.Literals.Boolean.True, + Tokens.Puncuation.Semicolon, + + Tokens.Puncuation.CurlyBrace.Close]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/structs.test.syntax.ts b/test/syntaxes/structs.test.syntax.ts index 86306f00cb..b3dc6d74e6 100644 --- a/test/syntaxes/structs.test.syntax.ts +++ b/test/syntaxes/structs.test.syntax.ts @@ -19,10 +19,10 @@ struct S { } let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Struct(2, 1), - Tokens.Identifiers.StructName("S", 2, 8), - Tokens.Puncuation.CurlyBrace.Open(2, 10), - Tokens.Puncuation.CurlyBrace.Close(2, 12)]); + Tokens.Keywords.Struct, + Tokens.Identifiers.StructName("S"), + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Puncuation.CurlyBrace.Close]); }); it("struct interface implementation", () => { @@ -35,16 +35,16 @@ struct S : IFoo { } let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Interface(2, 1), - Tokens.Identifiers.InterfaceName("IFoo", 2, 11), - Tokens.Puncuation.CurlyBrace.Open(2, 16), - Tokens.Puncuation.CurlyBrace.Close(2, 18), - Tokens.Keywords.Struct(3, 1), - Tokens.Identifiers.StructName("S", 3, 8), - Tokens.Puncuation.Colon(3, 10), - Tokens.Type("IFoo", 3, 12), - Tokens.Puncuation.CurlyBrace.Open(3, 17), - Tokens.Puncuation.CurlyBrace.Close(3, 19)]); + Tokens.Keywords.Interface, + Tokens.Identifiers.InterfaceName("IFoo"), + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Puncuation.CurlyBrace.Close, + 
Tokens.Keywords.Struct, + Tokens.Identifiers.StructName("S"), + Tokens.Puncuation.Colon, + Tokens.Type("IFoo"), + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Puncuation.CurlyBrace.Close]); }); it("generic struct", () => { @@ -56,10 +56,10 @@ struct S { } let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Struct(2, 1), - Tokens.Identifiers.StructName("S", 2, 8), - Tokens.Puncuation.CurlyBrace.Open(2, 18), - Tokens.Puncuation.CurlyBrace.Close(2, 20)]); + Tokens.Keywords.Struct, + Tokens.Identifiers.StructName("S"), + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Puncuation.CurlyBrace.Close]); }); it("generic struct with constraints", () => { @@ -71,14 +71,14 @@ struct S where T1 : T2 { } let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Struct(2, 1), - Tokens.Identifiers.StructName("S", 2, 8), - Tokens.Keywords.Where(2, 18), - Tokens.Type("T1", 2, 24), - Tokens.Puncuation.Colon(2, 27), - Tokens.Type("T2", 2, 29), - Tokens.Puncuation.CurlyBrace.Open(2, 32), - Tokens.Puncuation.CurlyBrace.Close(2, 34)]); + Tokens.Keywords.Struct, + Tokens.Identifiers.StructName("S"), + Tokens.Keywords.Where, + Tokens.Type("T1"), + Tokens.Puncuation.Colon, + Tokens.Type("T2"), + Tokens.Puncuation.CurlyBrace.Open, + Tokens.Puncuation.CurlyBrace.Close]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/using-directives.test.syntax.ts b/test/syntaxes/using-directives.test.syntax.ts index 3468de01a8..b8b32411ad 100644 --- a/test/syntaxes/using-directives.test.syntax.ts +++ b/test/syntaxes/using-directives.test.syntax.ts @@ -18,9 +18,9 @@ using System;`; let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Using(2, 1), - Tokens.Identifiers.NamespaceName("System", 2, 7), - Tokens.Puncuation.Semicolon(2, 13)]); + Tokens.Keywords.Using, + Tokens.Identifiers.NamespaceName("System"), + Tokens.Puncuation.Semicolon]); }); it("using static type", () => { @@ -31,12 +31,12 @@ using static System.Console;`; let 
tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Using(2, 1), - Tokens.Keywords.Static(2, 7), - Tokens.Type("System", 2, 14), - Tokens.Puncuation.Accessor(2, 20), - Tokens.Type("Console", 2, 21), - Tokens.Puncuation.Semicolon(2, 28)]); + Tokens.Keywords.Using, + Tokens.Keywords.Static, + Tokens.Type("System"), + Tokens.Puncuation.Accessor, + Tokens.Type("Console"), + Tokens.Puncuation.Semicolon]); }); it("namespace alias", () => { @@ -47,11 +47,11 @@ using S = System;`; let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Using(2, 1), - Tokens.Identifiers.AliasName("S", 2, 7), - Tokens.Operators.Assignment(2, 9), - Tokens.Type("System", 2, 11), - Tokens.Puncuation.Semicolon(2, 17)]); + Tokens.Keywords.Using, + Tokens.Identifiers.AliasName("S"), + Tokens.Operators.Assignment, + Tokens.Type("System"), + Tokens.Puncuation.Semicolon]); }); it("type alias", () => { @@ -62,13 +62,13 @@ using C = System.Console;`; let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Using(2, 1), - Tokens.Identifiers.AliasName("C", 2, 7), - Tokens.Operators.Assignment(2, 9), - Tokens.Type("System", 2, 11), - Tokens.Puncuation.Accessor(2, 17), - Tokens.Type("Console", 2, 18), - Tokens.Puncuation.Semicolon(2, 25)]); + Tokens.Keywords.Using, + Tokens.Identifiers.AliasName("C"), + Tokens.Operators.Assignment, + Tokens.Type("System"), + Tokens.Puncuation.Accessor, + Tokens.Type("Console"), + Tokens.Puncuation.Semicolon]); }); it("type alias with generic type", () => { @@ -79,22 +79,22 @@ using IntList = System.Collections.Generic.List;`; let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Using(2, 1), - Tokens.Identifiers.AliasName("IntList", 2, 7), - Tokens.Operators.Assignment(2, 15), - Tokens.Type("System", 2, 17), - Tokens.Puncuation.Accessor(2, 23), - Tokens.Type("Collections", 2, 24), - Tokens.Puncuation.Accessor(2, 35), - Tokens.Type("Generic", 2, 36), - Tokens.Puncuation.Accessor(2, 43), - 
Tokens.Type("List", 2, 44), - Tokens.Puncuation.TypeParameters.Begin(2, 48), - Tokens.Type("System", 2, 49), - Tokens.Puncuation.Accessor(2, 55), - Tokens.Type("Int32", 2, 56), - Tokens.Puncuation.TypeParameters.End(2, 61), - Tokens.Puncuation.Semicolon(2, 62)]); + Tokens.Keywords.Using, + Tokens.Identifiers.AliasName("IntList"), + Tokens.Operators.Assignment, + Tokens.Type("System"), + Tokens.Puncuation.Accessor, + Tokens.Type("Collections"), + Tokens.Puncuation.Accessor, + Tokens.Type("Generic"), + Tokens.Puncuation.Accessor, + Tokens.Type("List"), + Tokens.Puncuation.TypeParameters.Begin, + Tokens.Type("System"), + Tokens.Puncuation.Accessor, + Tokens.Type("Int32"), + Tokens.Puncuation.TypeParameters.End, + Tokens.Puncuation.Semicolon]); }); it("type alias with nested generic types", () => { @@ -105,35 +105,35 @@ using X = System.Collections.Generic.Dictionary { @@ -144,41 +144,41 @@ using X =/**/Dictionary/**//**/>/**/;// let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Using(2, 1), - Tokens.Identifiers.AliasName("X", 2, 7), - Tokens.Operators.Assignment(2, 9), - Tokens.Comment.MultiLine.Start(2, 10), - Tokens.Comment.MultiLine.End(2, 12), - Tokens.Type("Dictionary", 2, 14), - Tokens.Comment.MultiLine.Start(2, 24), - Tokens.Comment.MultiLine.End(2, 26), - Tokens.Puncuation.TypeParameters.Begin(2, 28), - Tokens.Comment.MultiLine.Start(2, 29), - Tokens.Comment.MultiLine.End(2, 31), - Tokens.Type("int", 2, 33), - Tokens.Comment.MultiLine.Start(2, 36), - Tokens.Comment.MultiLine.End(2, 38), - Tokens.Puncuation.Comma(2, 40), - Tokens.Comment.MultiLine.Start(2, 41), - Tokens.Comment.MultiLine.End(2, 43), - Tokens.Type("List", 2, 45), - Tokens.Comment.MultiLine.Start(2, 49), - Tokens.Comment.MultiLine.End(2, 51), - Tokens.Puncuation.TypeParameters.Begin(2, 53), - Tokens.Comment.MultiLine.Start(2, 54), - Tokens.Comment.MultiLine.End(2, 56), - Tokens.Type("string", 2, 58), - Tokens.Comment.MultiLine.Start(2, 64), - 
Tokens.Comment.MultiLine.End(2, 66), - Tokens.Puncuation.TypeParameters.End(2, 68), - Tokens.Comment.MultiLine.Start(2, 69), - Tokens.Comment.MultiLine.End(2, 71), - Tokens.Puncuation.TypeParameters.End(2, 73), - Tokens.Comment.MultiLine.Start(2, 74), - Tokens.Comment.MultiLine.End(2, 76), - Tokens.Puncuation.Semicolon(2, 78), - Tokens.Comment.SingleLine.Start(2, 79), - Tokens.Comment.SingleLine.Text("end", 2, 81)]); + Tokens.Keywords.Using, + Tokens.Identifiers.AliasName("X"), + Tokens.Operators.Assignment, + Tokens.Comment.MultiLine.Start, + Tokens.Comment.MultiLine.End, + Tokens.Type("Dictionary"), + Tokens.Comment.MultiLine.Start, + Tokens.Comment.MultiLine.End, + Tokens.Puncuation.TypeParameters.Begin, + Tokens.Comment.MultiLine.Start, + Tokens.Comment.MultiLine.End, + Tokens.Type("int"), + Tokens.Comment.MultiLine.Start, + Tokens.Comment.MultiLine.End, + Tokens.Puncuation.Comma, + Tokens.Comment.MultiLine.Start, + Tokens.Comment.MultiLine.End, + Tokens.Type("List"), + Tokens.Comment.MultiLine.Start, + Tokens.Comment.MultiLine.End, + Tokens.Puncuation.TypeParameters.Begin, + Tokens.Comment.MultiLine.Start, + Tokens.Comment.MultiLine.End, + Tokens.Type("string"), + Tokens.Comment.MultiLine.Start, + Tokens.Comment.MultiLine.End, + Tokens.Puncuation.TypeParameters.End, + Tokens.Comment.MultiLine.Start, + Tokens.Comment.MultiLine.End, + Tokens.Puncuation.TypeParameters.End, + Tokens.Comment.MultiLine.Start, + Tokens.Comment.MultiLine.End, + Tokens.Puncuation.Semicolon, + Tokens.Comment.SingleLine.Start, + Tokens.Comment.SingleLine.Text("end")]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/utils/tokenizer.ts b/test/syntaxes/utils/tokenizer.ts index 7905c7adef..cbc52b8122 100644 --- a/test/syntaxes/utils/tokenizer.ts +++ b/test/syntaxes/utils/tokenizer.ts @@ -37,7 +37,7 @@ export function tokenize(input: string | Input, excludeTypes: boolean = true): T const type = token.scopes[token.scopes.length - 1]; if (excludeTypes === false || 
excludedTypes.indexOf(type) < 0) { - tokens.push(new Token(text, type, lineIndex + 1, token.startIndex + 1)); + tokens.push(new Token(text, type)); } } } @@ -98,324 +98,168 @@ class Tester { export class Token { constructor( public text: string, - public type: string, - public line?: number, - public column?: number) { } + public type: string) { } } export namespace Tokens { - function createToken(text: string, type: string, line?: number, column?: number): Token { - return new Token(text, type, line, column); + function createToken(text: string, type: string): Token { + return new Token(text, type); } export namespace Comment { - export const LeadingWhitespace = (text: string, line?: number, column?: number) => - createToken(text, 'punctuation.whitespace.comment.leading.cs', line, column); + export const LeadingWhitespace = (text: string) => createToken(text, 'punctuation.whitespace.comment.leading.cs'); export namespace MultiLine { - export const End = (line?: number, column?: number) => - createToken('*/', 'punctuation.definition.comment.cs', line, column); + export const End = createToken('*/', 'punctuation.definition.comment.cs'); + export const Start = createToken('/*', 'punctuation.definition.comment.cs'); - export const Start = (line?: number, column?: number) => - createToken('/*', 'punctuation.definition.comment.cs', line, column); - - export const Text = (text: string, line?: number, column?: number) => - createToken(text, 'comment.block.cs', line, column); + export const Text = (text: string) => createToken(text, 'comment.block.cs'); } export namespace SingleLine { - export const Start = (line?: number, column?: number) => - createToken('//', 'punctuation.definition.comment.cs', line, column); + export const Start = createToken('//', 'punctuation.definition.comment.cs'); - export const Text = (text: string, line?: number, column?: number) => - createToken(text, 'comment.line.double-slash.cs', line, column); + export const Text = (text: string) => 
createToken(text, 'comment.line.double-slash.cs'); } } export namespace Identifiers { - export const AliasName = (text: string, line?: number, column?: number) => - createToken(text, 'entity.name.type.alias.cs', line, column); - - export const ClassName = (text: string, line?: number, column?: number) => - createToken(text, 'entity.name.type.class.cs', line, column); - - export const DelegateName = (text: string, line?: number, column?: number) => - createToken(text, 'entity.name.type.delegate.cs', line, column); - - export const EnumName = (text: string, line?: number, column?: number) => - createToken(text, 'entity.name.type.enum.cs', line, column); - - export const EventName = (text: string, line?: number, column?: number) => - createToken(text, 'entity.name.variable.event.cs', line, column); - - export const FieldName = (text: string, line?: number, column?: number) => - createToken(text, 'entity.name.variable.field.cs', line, column); - - export const InterfaceName = (text: string, line?: number, column?: number) => - createToken(text, 'entity.name.type.interface.cs', line, column); - - export const MethodName = (text: string, line?: number, column?: number) => - createToken(text, 'entity.name.function.cs', line, column); - - export const NamespaceName = (text: string, line?: number, column?: number) => - createToken(text, 'entity.name.type.namespace.cs', line, column); - - export const PropertyName = (text: string, line?: number, column?: number) => - createToken(text, 'entity.name.variable.property.cs', line, column); - - export const StructName = (text: string, line?: number, column?: number) => - createToken(text, 'entity.name.type.struct.cs', line, column); + export const AliasName = (text: string) => createToken(text, 'entity.name.type.alias.cs'); + export const ClassName = (text: string) => createToken(text, 'entity.name.type.class.cs'); + export const DelegateName = (text: string) => createToken(text, 'entity.name.type.delegate.cs'); + export const 
EnumName = (text: string) => createToken(text, 'entity.name.type.enum.cs'); + export const EventName = (text: string) => createToken(text, 'entity.name.variable.event.cs'); + export const FieldName = (text: string) => createToken(text, 'entity.name.variable.field.cs'); + export const InterfaceName = (text: string) => createToken(text, 'entity.name.type.interface.cs'); + export const MethodName = (text: string) => createToken(text, 'entity.name.function.cs'); + export const NamespaceName = (text: string) => createToken(text, 'entity.name.type.namespace.cs'); + export const PropertyName = (text: string) => createToken(text, 'entity.name.variable.property.cs'); + export const StructName = (text: string) => createToken(text, 'entity.name.type.struct.cs'); } export namespace Keywords { export namespace Modifiers { - export const Abstract = (line?: number, column?: number) => - createToken('abstract', 'storage.modifier.cs', line, column); - - export const Const = (line?: number, column?: number) => - createToken('const', 'storage.modifier.cs', line, column); - - export const Internal = (line?: number, column?: number) => - createToken('internal', 'storage.modifier.cs', line, column); - - export const New = (line?: number, column?: number) => - createToken('new', 'storage.modifier.cs', line, column); - - export const Out = (line?: number, column?: number) => - createToken('out', 'storage.modifier.cs', line, column); - - export const Params = (line?: number, column?: number) => - createToken('params', 'storage.modifier.cs', line, column); - - export const Partial = (line?: number, column?: number) => - createToken('partial', 'storage.modifier.cs', line, column); - - export const Private = (line?: number, column?: number) => - createToken('private', 'storage.modifier.cs', line, column); - - export const Protected = (line?: number, column?: number) => - createToken('protected', 'storage.modifier.cs', line, column); - - export const Public = (line?: number, column?: number) 
=> - createToken('public', 'storage.modifier.cs', line, column); - - export const ReadOnly = (line?: number, column?: number) => - createToken('readonly', 'storage.modifier.cs', line, column); - - export const Ref = (line?: number, column?: number) => - createToken('ref', 'storage.modifier.cs', line, column); - - export const Sealed = (line?: number, column?: number) => - createToken('sealed', 'storage.modifier.cs', line, column); - - export const Static = (line?: number, column?: number) => - createToken('static', 'storage.modifier.cs', line, column); + export const Abstract = createToken('abstract', 'storage.modifier.cs'); + export const Const = createToken('const', 'storage.modifier.cs'); + export const Internal = createToken('internal', 'storage.modifier.cs'); + export const New = createToken('new', 'storage.modifier.cs'); + export const Out = createToken('out', 'storage.modifier.cs'); + export const Params = createToken('params', 'storage.modifier.cs'); + export const Partial = createToken('partial', 'storage.modifier.cs'); + export const Private = createToken('private', 'storage.modifier.cs'); + export const Protected = createToken('protected', 'storage.modifier.cs'); + export const Public = createToken('public', 'storage.modifier.cs'); + export const ReadOnly = createToken('readonly', 'storage.modifier.cs'); + export const Ref = createToken('ref', 'storage.modifier.cs'); + export const Sealed = createToken('sealed', 'storage.modifier.cs'); + export const Static = createToken('static', 'storage.modifier.cs'); } - export const Add = (line?: number, column?: number) => - createToken('add', 'keyword.other.add.cs', line, column); - - export const Alias = (line?: number, column?: number) => - createToken('alias', 'keyword.other.alias.cs', line, column); - - export const AttributeSpecifier = (text: string, line?: number, column?: number) => - createToken(text, 'keyword.other.attribute-specifier.cs', line, column); - - export const Class = (line?: number, column?: 
number) => - createToken('class', 'keyword.other.class.cs', line, column); - - export const Delegate = (line?: number, column?: number) => - createToken('delegate', 'keyword.other.delegate.cs', line, column); - - export const Enum = (line?: number, column?: number) => - createToken('enum', 'keyword.other.enum.cs', line, column); - - export const Event = (line?: number, column?: number) => - createToken('event', 'keyword.other.event.cs', line, column); - - export const Extern = (line?: number, column?: number) => - createToken('extern', 'keyword.other.extern.cs', line, column); - - export const Get = (line?: number, column?: number) => - createToken('get', 'keyword.other.get.cs', line, column); - - export const Interface = (line?: number, column?: number) => - createToken('interface', 'keyword.other.interface.cs', line, column); - - export const Namespace = (line?: number, column?: number) => - createToken('namespace', 'keyword.other.namespace.cs', line, column); - - export const New = (line?: number, column?: number) => - createToken('new', 'keyword.other.new.cs', line, column); - - export const Remove = (line?: number, column?: number) => - createToken('remove', 'keyword.other.remove.cs', line, column); - - export const Return = (line?: number, column?: number) => - createToken('return', 'keyword.control.flow.cs', line, column); - - export const Set = (line?: number, column?: number) => - createToken('set', 'keyword.other.set.cs', line, column); - - export const Static = (line?: number, column?: number) => - createToken('static', 'keyword.other.static.cs', line, column); - - export const Struct = (line?: number, column?: number) => - createToken('struct', 'keyword.other.struct.cs', line, column); - - export const This = (line?: number, column?: number) => - createToken('this', 'keyword.other.this.cs', line, column); - - export const Using = (line?: number, column?: number) => - createToken('using', 'keyword.other.using.cs', line, column); - - export const Where = 
(line?: number, column?: number) => - createToken('where', 'keyword.other.where.cs', line, column); + export const Add = createToken('add', 'keyword.other.add.cs'); + export const Alias = createToken('alias', 'keyword.other.alias.cs'); + export const AttributeSpecifier = (text: string) => createToken(text, 'keyword.other.attribute-specifier.cs'); + export const Class = createToken('class', 'keyword.other.class.cs'); + export const Delegate = createToken('delegate', 'keyword.other.delegate.cs'); + export const Enum = createToken('enum', 'keyword.other.enum.cs'); + export const Event = createToken('event', 'keyword.other.event.cs'); + export const Extern = createToken('extern', 'keyword.other.extern.cs'); + export const Get = createToken('get', 'keyword.other.get.cs'); + export const Interface = createToken('interface', 'keyword.other.interface.cs'); + export const Namespace = createToken('namespace', 'keyword.other.namespace.cs'); + export const New = createToken('new', 'keyword.other.new.cs'); + export const Remove = createToken('remove', 'keyword.other.remove.cs'); + export const Return = createToken('return', 'keyword.control.flow.cs'); + export const Set = createToken('set', 'keyword.other.set.cs'); + export const Static = createToken('static', 'keyword.other.static.cs'); + export const Struct = createToken('struct', 'keyword.other.struct.cs'); + export const This = createToken('this', 'keyword.other.this.cs'); + export const Using = createToken('using', 'keyword.other.using.cs'); + export const Where = createToken('where', 'keyword.other.where.cs'); } export namespace Literals { export namespace Boolean { - export const False = (line?: number, column?: number) => - createToken('false', 'constant.language.boolean.false.cs', line, column); - - export const True = (line?: number, column?: number) => - createToken('true', 'constant.language.boolean.true.cs', line, column); + export const False = createToken('false', 'constant.language.boolean.false.cs'); + export 
const True = createToken('true', 'constant.language.boolean.true.cs'); } - export const Null = (line?: number, column?: number) => - createToken('null', 'constant.language.null.cs', line, column); + export const Null = createToken('null', 'constant.language.null.cs'); export namespace Numeric { - export const Binary = (text: string, line?: number, column?: number) => - createToken(text, 'constant.numeric.binary.cs', line, column); - - export const Decimal = (text: string, line?: number, column?: number) => - createToken(text, 'constant.numeric.decimal.cs', line, column); - - export const Hexadecimal = (text: string, line?: number, column?: number) => - createToken(text, 'constant.numeric.hex.cs', line, column); + export const Binary = (text: string) => createToken(text, 'constant.numeric.binary.cs'); + export const Decimal = (text: string) => createToken(text, 'constant.numeric.decimal.cs'); + export const Hexadecimal = (text: string) => createToken(text, 'constant.numeric.hex.cs'); } - export const String = (text: string, line?: number, column?: number) => - createToken(text, 'string.quoted.double.cs', line, column); + export const String = (text: string) => createToken(text, 'string.quoted.double.cs'); } export namespace Operators { - export const Arrow = (line?: number, column?: number) => - createToken('=>', 'keyword.operator.arrow.cs', line, column); + export const Arrow = createToken('=>', 'keyword.operator.arrow.cs'); export namespace Arithmetic { - export const Addition = (line?: number, column?: number) => - createToken('+', 'keyword.operator.arithmetic.cs', line, column); - - export const Division = (line?: number, column?: number) => - createToken('/', 'keyword.operator.arithmetic.cs', line, column); - - export const Multiplication = (line?: number, column?: number) => - createToken('*', 'keyword.operator.arithmetic.cs', line, column); - - export const Remainder = (line?: number, column?: number) => - createToken('%', 'keyword.operator.arithmetic.cs', 
line, column); - - export const Subtraction = (line?: number, column?: number) => - createToken('-', 'keyword.operator.arithmetic.cs', line, column); + export const Addition = createToken('+', 'keyword.operator.arithmetic.cs'); + export const Division = createToken('/', 'keyword.operator.arithmetic.cs'); + export const Multiplication = createToken('*', 'keyword.operator.arithmetic.cs'); + export const Remainder = createToken('%', 'keyword.operator.arithmetic.cs'); + export const Subtraction = createToken('-', 'keyword.operator.arithmetic.cs'); } - export const Assignment = (line?: number, column?: number) => - createToken('=', 'keyword.operator.assignment.cs', line, column); + export const Assignment = createToken('=', 'keyword.operator.assignment.cs'); } export namespace Puncuation { - export const Accessor = (line?: number, column?: number) => - createToken('.', 'punctuation.accessor.cs', line, column); - - export const Colon = (line?: number, column?: number) => - createToken(':', 'punctuation.separator.colon.cs', line, column); - - export const Comma = (line?: number, column?: number) => - createToken(',', 'punctuation.separator.comma.cs', line, column); + export const Accessor = createToken('.', 'punctuation.accessor.cs'); + export const Colon = createToken(':', 'punctuation.separator.colon.cs'); + export const Comma = createToken(',', 'punctuation.separator.comma.cs'); export namespace CurlyBrace { - export const Close = (line?: number, column?: number) => - createToken('}', 'punctuation.curlybrace.close.cs', line, column); - - export const Open = (line?: number, column?: number) => - createToken('{', 'punctuation.curlybrace.open.cs', line, column); + export const Close = createToken('}', 'punctuation.curlybrace.close.cs'); + export const Open = createToken('{', 'punctuation.curlybrace.open.cs'); } export namespace Interpolation { - export const Begin = (line?: number, column?: number) => - createToken('{', 'punctuation.definition.interpolation.begin.cs', 
line, column); - - export const End = (line?: number, column?: number) => - createToken('}', 'punctuation.definition.interpolation.end.cs', line, column); + export const Begin = createToken('{', 'punctuation.definition.interpolation.begin.cs'); + export const End = createToken('}', 'punctuation.definition.interpolation.end.cs'); } export namespace InterpolatedString { - export const Begin = (line?: number, column?: number) => - createToken('$"', 'punctuation.definition.string.begin.cs', line, column); - - export const End = (line?: number, column?: number) => - createToken('"', 'punctuation.definition.string.end.cs', line, column); - - export const VerbatimBegin = (line?: number, column?: number) => - createToken('$@"', 'punctuation.definition.string.begin.cs', line, column); + export const Begin = createToken('$"', 'punctuation.definition.string.begin.cs'); + export const End = createToken('"', 'punctuation.definition.string.end.cs'); + export const VerbatimBegin = createToken('$@"', 'punctuation.definition.string.begin.cs'); } export namespace Parenthesis { - export const Close = (line?: number, column?: number) => - createToken(')', 'punctuation.parenthesis.close.cs', line, column); - - export const Open = (line?: number, column?: number) => - createToken('(', 'punctuation.parenthesis.open.cs', line, column); + export const Close = createToken(')', 'punctuation.parenthesis.close.cs'); + export const Open = createToken('(', 'punctuation.parenthesis.open.cs'); } - export const Semicolon = (line?: number, column?: number) => - createToken(';', 'punctuation.terminator.statement.cs', line, column); + export const Semicolon = createToken(';', 'punctuation.terminator.statement.cs'); export namespace SquareBracket { - export const Close = (line?: number, column?: number) => - createToken(']', 'punctuation.squarebracket.close.cs', line, column); - - export const Open = (line?: number, column?: number) => - createToken('[', 'punctuation.squarebracket.open.cs', line, 
column); + export const Close = createToken(']', 'punctuation.squarebracket.close.cs'); + export const Open = createToken('[', 'punctuation.squarebracket.open.cs'); } export namespace String { - export const Begin = (line?: number, column?: number) => - createToken('"', 'punctuation.definition.string.begin.cs', line, column); - - export const End = (line?: number, column?: number) => - createToken('"', 'punctuation.definition.string.end.cs', line, column); + export const Begin = createToken('"', 'punctuation.definition.string.begin.cs'); + export const End = createToken('"', 'punctuation.definition.string.end.cs'); } export namespace TypeParameters { - export const Begin = (line?: number, column?: number) => - createToken('<', 'punctuation.definition.typeparameters.begin.cs', line, column); - - export const End = (line?: number, column?: number) => - createToken('>', 'punctuation.definition.typeparameters.end.cs', line, column); + export const Begin = createToken('<', 'punctuation.definition.typeparameters.begin.cs'); + export const End = createToken('>', 'punctuation.definition.typeparameters.end.cs'); } } export namespace Variables { - export const Alias = (text: string, line?: number, column?: number) => - createToken(text, 'variable.other.alias.cs', line, column); - - export const EnumMember = (text: string, line?: number, column?: number) => - createToken(text, 'variable.other.enummember.cs', line, column); - - export const Parameter = (text: string, line?: number, column?: number) => - createToken(text, 'variable.parameter.cs', line, column); - - export const ReadWrite = (text: string, line?: number, column?: number) => - createToken(text, 'variable.other.readwrite.cs', line, column); + export const Alias = (text: string) => createToken(text, 'variable.other.alias.cs'); + export const EnumMember = (text: string) => createToken(text, 'variable.other.enummember.cs'); + export const Parameter = (text: string) => createToken(text, 'variable.parameter.cs'); + 
export const ReadWrite = (text: string) => createToken(text, 'variable.other.readwrite.cs'); } - export const IllegalNewLine = (text: string, line?: number, column?: number) => - createToken(text, 'invalid.illegal.newline.cs', line, column); - - export const Type = (text: string, line?: number, column?: number) => - createToken(text, 'storage.type.cs', line, column); + export const IllegalNewLine = (text: string) => createToken(text, 'invalid.illegal.newline.cs'); + export const Type = (text: string) => createToken(text, 'storage.type.cs'); } From 57bf1bfe35615385ce12a992b2ec0a89ee14e95e Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Wed, 28 Dec 2016 13:30:04 -0800 Subject: [PATCH 040/192] Test only the parts that matter --- test/syntaxes/attributes.test.syntax.ts | 60 ++---- test/syntaxes/boolean-literals.test.syntax.ts | 34 +--- test/syntaxes/classes.test.syntax.ts | 110 ++++------- test/syntaxes/comments.test.syntax.ts | 18 +- test/syntaxes/delegates.test.syntax.ts | 23 +-- test/syntaxes/enums.test.syntax.ts | 30 +-- test/syntaxes/events.test.syntax.ts | 96 ++-------- test/syntaxes/extern-aliases.test.syntax.ts | 4 +- test/syntaxes/fields.test.syntax.ts | 124 +++---------- .../interation-statements.test.syntax.ts | 20 +- test/syntaxes/interfaces.test.syntax.ts | 30 +-- .../interpolated-strings.test.syntax.ts | 173 ++++-------------- test/syntaxes/methods.test.syntax.ts | 51 ++---- test/syntaxes/numeric-literals.test.syntax.ts | 66 ++----- test/syntaxes/properties.test.syntax.ts | 152 ++++----------- test/syntaxes/structs.test.syntax.ts | 16 +- test/syntaxes/using-directives.test.syntax.ts | 42 ++--- test/syntaxes/utils/tokenizer.ts | 19 +- 18 files changed, 270 insertions(+), 798 deletions(-) diff --git a/test/syntaxes/attributes.test.syntax.ts b/test/syntaxes/attributes.test.syntax.ts index a11ce8e751..cac20ee72b 100644 --- a/test/syntaxes/attributes.test.syntax.ts +++ b/test/syntaxes/attributes.test.syntax.ts @@ -12,10 +12,8 @@ describe("Grammar", () => 
{ describe("Attributes", () => { it("global attribute", () => { - const input = ` -[Foo]`; - - let tokens = tokenize(input); + const input = `[Foo]`; + const tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Puncuation.SquareBracket.Open, @@ -25,10 +23,8 @@ describe("Grammar", () => { it("global attribute with specifier", () => { - const input = ` -[assembly: Foo]`; - - let tokens = tokenize(input); + const input = `[assembly: Foo]`; + const tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Puncuation.SquareBracket.Open, @@ -40,10 +36,8 @@ describe("Grammar", () => { it("Two global attributes in same section with specifier", () => { - const input = ` -[module: Foo, Bar]`; - - let tokens = tokenize(input); + const input = `[module: Foo, Bar]`; + const tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Puncuation.SquareBracket.Open, @@ -57,10 +51,8 @@ describe("Grammar", () => { it("Two global attributes in same section with specifier and empty argument lists", () => { - const input = ` -[module: Foo(), Bar()]`; - - let tokens = tokenize(input); + const input = `[module: Foo(), Bar()]`; + const tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Puncuation.SquareBracket.Open, @@ -78,10 +70,8 @@ describe("Grammar", () => { it("Global attribute with one argument", () => { - const input = ` -[Foo(true)]`; - - let tokens = tokenize(input); + const input = `[Foo(true)]`; + const tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Puncuation.SquareBracket.Open, @@ -94,10 +84,8 @@ describe("Grammar", () => { it("Global attribute with two arguments", () => { - const input = ` -[Foo(true, 42)]`; - - let tokens = tokenize(input); + const input = `[Foo(true, 42)]`; + const tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Puncuation.SquareBracket.Open, @@ -112,10 +100,8 @@ describe("Grammar", () => { it("Global attribute with three arguments", () => { - const input = ` -[Foo(true, 42, "text")]`; - - let tokens = 
tokenize(input); + const input = `[Foo(true, 42, "text")]`; + const tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Puncuation.SquareBracket.Open, @@ -134,10 +120,8 @@ describe("Grammar", () => { it("Global attribute with named argument", () => { - const input = ` -[Foo(Bar = 42)]`; - - let tokens = tokenize(input); + const input = `[Foo(Bar = 42)]`; + const tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Puncuation.SquareBracket.Open, @@ -152,10 +136,8 @@ describe("Grammar", () => { it("Global attribute with one positional argument and one named argument", () => { - const input = ` -[Foo(true, Bar = 42)]`; - - let tokens = tokenize(input); + const input = `[Foo(true, Bar = 42)]`; + const tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Puncuation.SquareBracket.Open, @@ -172,10 +154,8 @@ describe("Grammar", () => { it("Global attribute with specifier, one positional argument, and two named arguments", () => { - const input = ` -[module: Foo(true, Bar = 42, Baz = "hello")]`; - - let tokens = tokenize(input); + const input = `[module: Foo(true, Bar = 42, Baz = "hello")]`; + const tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Puncuation.SquareBracket.Open, diff --git a/test/syntaxes/boolean-literals.test.syntax.ts b/test/syntaxes/boolean-literals.test.syntax.ts index feb891419b..809686175f 100644 --- a/test/syntaxes/boolean-literals.test.syntax.ts +++ b/test/syntaxes/boolean-literals.test.syntax.ts @@ -4,7 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { should } from 'chai'; -import { tokenize, Tokens } from './utils/tokenizer'; +import { tokenize, Input, Tokens } from './utils/tokenizer'; describe("Grammar", () => { before(() => should()); @@ -12,48 +12,28 @@ describe("Grammar", () => { describe("Literals - boolean", () => { it("true", () => { - const input = ` -class C { - bool x = true; -}`; - - let tokens = tokenize(input); + const input = 
Input.InClass(`bool x = true;`); + const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("C"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Type("bool"), Tokens.Identifiers.FieldName("x"), Tokens.Operators.Assignment, Tokens.Literals.Boolean.True, - Tokens.Puncuation.Semicolon, - - Tokens.Puncuation.CurlyBrace.Close]); + Tokens.Puncuation.Semicolon]); }); it("false", () => { - const input = ` -class C { - bool x = false; -}`; - - let tokens = tokenize(input); + const input = Input.InClass(`bool x = false;`); + const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("C"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Type("bool"), Tokens.Identifiers.FieldName("x"), Tokens.Operators.Assignment, Tokens.Literals.Boolean.False, - Tokens.Puncuation.Semicolon, - - Tokens.Puncuation.CurlyBrace.Close]); + Tokens.Puncuation.Semicolon]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/classes.test.syntax.ts b/test/syntaxes/classes.test.syntax.ts index 1ec8212daf..24236769fc 100644 --- a/test/syntaxes/classes.test.syntax.ts +++ b/test/syntaxes/classes.test.syntax.ts @@ -4,7 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { should } from 'chai'; -import { tokenize, Tokens } from './utils/tokenizer'; +import { tokenize, Input, Tokens } from './utils/tokenizer'; describe("Grammar", () => { before(() => should()); @@ -12,34 +12,28 @@ describe("Grammar", () => { describe("Class", () => { it("class keyword and storage modifiers", () => { - const input = ` -namespace TestNamespace -{ - public class PublicClass { } + const input = Input.InNamespace(` +public class PublicClass { } - class DefaultClass { } + class DefaultClass { } - internal class InternalClass { } +internal class InternalClass { } - static class DefaultStaticClass { } + static class DefaultStaticClass { } - public 
static class PublicStaticClass { } +public static class PublicStaticClass { } - sealed class DefaultSealedClass { } + sealed class DefaultSealedClass { } - public sealed class PublicSealedClass { } +public sealed class PublicSealedClass { } - public abstract class PublicAbstractClass { } +public abstract class PublicAbstractClass { } - abstract class DefaultAbstractClass { } -}`; - let tokens = tokenize(input); + abstract class DefaultAbstractClass { }`); - tokens.should.deep.equal([ - Tokens.Keywords.Namespace, - Tokens.Identifiers.NamespaceName("TestNamespace"), - Tokens.Puncuation.CurlyBrace.Open, + const tokens = tokenize(input); + tokens.should.deep.equal([ Tokens.Keywords.Modifiers.Public, Tokens.Keywords.Class, Tokens.Identifiers.ClassName("PublicClass"), @@ -94,49 +88,31 @@ namespace TestNamespace Tokens.Keywords.Class, Tokens.Identifiers.ClassName("DefaultAbstractClass"), Tokens.Puncuation.CurlyBrace.Open, - Tokens.Puncuation.CurlyBrace.Close, - Tokens.Puncuation.CurlyBrace.Close]); }); it("generics in identifier", () => { - const input = ` -namespace TestNamespace -{ - class Dictionary { } -}`; - let tokens = tokenize(input); + const input = Input.InNamespace(`class Dictionary { }`); + const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Namespace, - Tokens.Identifiers.NamespaceName("TestNamespace"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Keywords.Class, Tokens.Identifiers.ClassName("Dictionary"), Tokens.Puncuation.CurlyBrace.Open, - Tokens.Puncuation.CurlyBrace.Close, - Tokens.Puncuation.CurlyBrace.Close]); }); it("inheritance", () => { - const input = ` -namespace TestNamespace -{ - class PublicClass : IInterface, IInterfaceTwo { } - class PublicClass : Root.IInterface, Something.IInterfaceTwo { } - class PublicClass : Dictionary>, IMap> { } -}`; - let tokens = tokenize(input); + const input = Input.InNamespace(` +class PublicClass : IInterface, IInterfaceTwo { } +class PublicClass : Root.IInterface, 
Something.IInterfaceTwo { } +class PublicClass : Dictionary>, IMap> { }`); - tokens.should.deep.equal([ - Tokens.Keywords.Namespace, - Tokens.Identifiers.NamespaceName("TestNamespace"), - Tokens.Puncuation.CurlyBrace.Open, + const tokens = tokenize(input); + tokens.should.deep.equal([ Tokens.Keywords.Class, Tokens.Identifiers.ClassName("PublicClass"), Tokens.Puncuation.Colon, @@ -191,30 +167,21 @@ namespace TestNamespace Tokens.Puncuation.TypeParameters.End, Tokens.Puncuation.TypeParameters.End, Tokens.Puncuation.CurlyBrace.Open, - Tokens.Puncuation.CurlyBrace.Close, - Tokens.Puncuation.CurlyBrace.Close]); }); it("generic constraints", () => { - const input = ` -namespace TestNamespace + const input = Input.InNamespace(` +class PublicClass where T : ISomething { } +class PublicClass : Dictionary[]>, ISomething + where T : ICar, new() + where X : struct { - class PublicClass where T : ISomething { } - class PublicClass : Dictionary[]>, ISomething - where T : ICar, new() - where X : struct - { - } -}`; - let tokens = tokenize(input); +}`); + const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Namespace, - Tokens.Identifiers.NamespaceName("TestNamespace"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Keywords.Class, Tokens.Identifiers.ClassName("PublicClass"), Tokens.Keywords.Where, @@ -253,31 +220,22 @@ namespace TestNamespace Tokens.Puncuation.Colon, Tokens.Keywords.Struct, Tokens.Puncuation.CurlyBrace.Open, - Tokens.Puncuation.CurlyBrace.Close, - Tokens.Puncuation.CurlyBrace.Close]); }); it("nested class", () => { - const input = ` -namespace TestNamespace + const input = Input.InNamespace(` +class Klass { - class Klass + public class Nested { - public class Nested - { - } } -}`; - let tokens = tokenize(input); +}`); + const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Namespace, - Tokens.Identifiers.NamespaceName("TestNamespace"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Keywords.Class, 
Tokens.Identifiers.ClassName("Klass"), Tokens.Puncuation.CurlyBrace.Open, @@ -288,8 +246,6 @@ namespace TestNamespace Tokens.Puncuation.CurlyBrace.Open, Tokens.Puncuation.CurlyBrace.Close, - Tokens.Puncuation.CurlyBrace.Close, - Tokens.Puncuation.CurlyBrace.Close]); }); }); diff --git a/test/syntaxes/comments.test.syntax.ts b/test/syntaxes/comments.test.syntax.ts index 12274ba1f6..1620179eeb 100644 --- a/test/syntaxes/comments.test.syntax.ts +++ b/test/syntaxes/comments.test.syntax.ts @@ -12,10 +12,8 @@ describe("Grammar", () => { describe("Comments", () => { it("single-line comment", () => { - const input = ` -// foo`; - - let tokens = tokenize(input); + const input = `// foo`; + const tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Comment.SingleLine.Start, @@ -24,10 +22,8 @@ describe("Grammar", () => { it("single-line comment after whitespace", () => { - const input = ` - // foo`; - - let tokens = tokenize(input); + const input = ` // foo`; + const tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Comment.LeadingWhitespace(" "), @@ -37,10 +33,8 @@ describe("Grammar", () => { it("multi-line comment", () => { - const input = ` -/* foo */`; - - let tokens = tokenize(input); + const input = `/* foo */`; + const tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Comment.MultiLine.Start, diff --git a/test/syntaxes/delegates.test.syntax.ts b/test/syntaxes/delegates.test.syntax.ts index fba8080a03..7654d37d9c 100644 --- a/test/syntaxes/delegates.test.syntax.ts +++ b/test/syntaxes/delegates.test.syntax.ts @@ -12,11 +12,8 @@ describe("Grammar", () => { describe("Delegates", () => { it("void delegate with no parameters", () => { - const input = ` -delegate void D(); -`; - - let tokens = tokenize(input); + const input = `delegate void D();`; + const tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Delegate, @@ -29,11 +26,8 @@ delegate void D(); it("generic delegate with variance", () => { - const input = ` -delegate TResult 
D(T arg1); -`; - - let tokens = tokenize(input); + const input = `delegate TResult D(T arg1);`; + const tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Delegate, @@ -53,7 +47,7 @@ delegate void D() where T1 : T2; `; - let tokens = tokenize(input); + const tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Delegate, @@ -70,11 +64,8 @@ delegate void D() it("delegate with multiple parameters", () => { - const input = ` -delegate int D(ref string x, out int y, params object[] z); -`; - - let tokens = tokenize(input); + const input = `delegate int D(ref string x, out int y, params object[] z);`; + const tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Delegate, diff --git a/test/syntaxes/enums.test.syntax.ts b/test/syntaxes/enums.test.syntax.ts index 49b86969ad..496067d7bd 100644 --- a/test/syntaxes/enums.test.syntax.ts +++ b/test/syntaxes/enums.test.syntax.ts @@ -12,11 +12,8 @@ describe("Grammar", () => { describe("Enums", () => { it("simple enum", () => { - const input = ` -enum E { } -`; - - let tokens = tokenize(input); + const input = `enum E { }`; + const tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Enum, @@ -27,11 +24,8 @@ enum E { } it("enum with base type", () => { - const input = ` -enum E : byte { } -`; - - let tokens = tokenize(input); + const input = `enum E : byte { }`; + const tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Enum, @@ -44,11 +38,8 @@ enum E : byte { } it("enum with single member", () => { - const input = ` -enum E { M1 } -`; - - let tokens = tokenize(input); + const input = `enum E { M1 }`; + const tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Enum, @@ -60,11 +51,8 @@ enum E { M1 } it("enum with multiple members", () => { - const input = ` -enum Color { Red, Green, Blue } -`; - - let tokens = tokenize(input); + const input = `enum Color { Red, Green, Blue }`; + const tokens = tokenize(input); 
tokens.should.deep.equal([ Tokens.Keywords.Enum, @@ -89,7 +77,7 @@ enum E } `; - let tokens = tokenize(input); + const tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Enum, diff --git a/test/syntaxes/events.test.syntax.ts b/test/syntaxes/events.test.syntax.ts index 892a4b21e0..7d757472ef 100644 --- a/test/syntaxes/events.test.syntax.ts +++ b/test/syntaxes/events.test.syntax.ts @@ -4,7 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { should } from 'chai'; -import { tokenize, Tokens } from './utils/tokenizer'; +import { tokenize, Input, Tokens } from './utils/tokenizer'; describe("Grammar", () => { before(() => should()); @@ -12,98 +12,52 @@ describe("Grammar", () => { describe("Events", () => { it("declaration", () => { - const input = ` -public class Tester -{ - public event Type Event; -}`; - - let tokens = tokenize(input); + const input = Input.InClass(`public event Type Event;`); + const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public, - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("Tester"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Keywords.Modifiers.Public, Tokens.Keywords.Event, Tokens.Type("Type"), Tokens.Identifiers.EventName("Event"), - Tokens.Puncuation.Semicolon, - - Tokens.Puncuation.CurlyBrace.Close]); + Tokens.Puncuation.Semicolon]); }); it("declaration with multiple modifiers", () => { - const input = ` -public class Tester -{ - protected internal event Type Event; -}`; - - let tokens = tokenize(input); + const input = Input.InClass(`protected internal event Type Event;`); + const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public, - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("Tester"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Keywords.Modifiers.Protected, Tokens.Keywords.Modifiers.Internal, Tokens.Keywords.Event, Tokens.Type("Type"), 
Tokens.Identifiers.EventName("Event"), - Tokens.Puncuation.Semicolon, - - Tokens.Puncuation.CurlyBrace.Close]); + Tokens.Puncuation.Semicolon]); }); it("declaration with multiple declarators", () => { - const input = ` -public class Tester -{ - public event Type Event1, Event2; -}`; - - let tokens = tokenize(input); + const input = Input.InClass(`public event Type Event1, Event2;`); + const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public, - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("Tester"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Keywords.Modifiers.Public, Tokens.Keywords.Event, Tokens.Type("Type"), Tokens.Identifiers.EventName("Event1"), Tokens.Puncuation.Comma, Tokens.Identifiers.EventName("Event2"), - Tokens.Puncuation.Semicolon, - - Tokens.Puncuation.CurlyBrace.Close]); + Tokens.Puncuation.Semicolon]); }); it("generic", () => { - const input = ` -public class Tester -{ - public event EventHandler, Dictionary> Event; -}`; - - let tokens = tokenize(input); + const input = Input.InClass(`public event EventHandler, Dictionary> Event;`); + const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public, - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("Tester"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Keywords.Modifiers.Public, Tokens.Keywords.Event, Tokens.Type("EventHandler"), @@ -121,31 +75,21 @@ public class Tester Tokens.Puncuation.TypeParameters.End, Tokens.Puncuation.TypeParameters.End, Tokens.Identifiers.EventName("Event"), - Tokens.Puncuation.Semicolon, - - Tokens.Puncuation.CurlyBrace.Close]); + Tokens.Puncuation.Semicolon]); }); it("declaration with accessors", () => { - const input = ` -public class Tester + const input = Input.InClass(` +public event Type Event { - public event Type Event - { - add { } - remove { } - } -}`; + add { } + remove { } +}`); - let tokens = tokenize(input); + const tokens = tokenize(input); tokens.should.deep.equal([ - 
Tokens.Keywords.Modifiers.Public, - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("Tester"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Keywords.Modifiers.Public, Tokens.Keywords.Event, Tokens.Type("Type"), @@ -157,8 +101,6 @@ public class Tester Tokens.Keywords.Remove, Tokens.Puncuation.CurlyBrace.Open, Tokens.Puncuation.CurlyBrace.Close, - Tokens.Puncuation.CurlyBrace.Close, - Tokens.Puncuation.CurlyBrace.Close]); }); }); diff --git a/test/syntaxes/extern-aliases.test.syntax.ts b/test/syntaxes/extern-aliases.test.syntax.ts index 4febcdd9c7..da4770b1ee 100644 --- a/test/syntaxes/extern-aliases.test.syntax.ts +++ b/test/syntaxes/extern-aliases.test.syntax.ts @@ -10,13 +10,13 @@ describe("Grammar", () => { before(() => should()); describe("Extern aliases", () => { - it("simple", () => { + it("declaration", () => { const input = ` extern alias X; extern alias Y;`; - let tokens = tokenize(input); + const tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Extern, diff --git a/test/syntaxes/fields.test.syntax.ts b/test/syntaxes/fields.test.syntax.ts index 9b38c85ac4..c6d5a2e897 100644 --- a/test/syntaxes/fields.test.syntax.ts +++ b/test/syntaxes/fields.test.syntax.ts @@ -4,30 +4,22 @@ *--------------------------------------------------------------------------------------------*/ import { should } from 'chai'; -import { tokenize, Tokens } from './utils/tokenizer'; +import { tokenize, Input, Tokens } from './utils/tokenizer'; describe("Grammar", () => { before(() => should()); - describe("Field", function () { - it("declaration", function () { + describe("Field", () => { + it("declaration", () => { - const input = ` -public class Tester -{ - private List _field; - private List field; - private List field123; -}`; + const input = Input.InClass(` +private List _field; +private List field; +private List field123;`); - let tokens = tokenize(input); + const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public, - 
Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("Tester"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Keywords.Modifiers.Private, Tokens.Type("List"), Tokens.Identifiers.FieldName("_field"), @@ -41,27 +33,15 @@ public class Tester Tokens.Keywords.Modifiers.Private, Tokens.Type("List"), Tokens.Identifiers.FieldName("field123"), - Tokens.Puncuation.Semicolon, - - Tokens.Puncuation.CurlyBrace.Close]); + Tokens.Puncuation.Semicolon]); }); it("generic", () => { - const input = ` -public class Tester -{ - private Dictionary< List, Dictionary> _field; -}`; - - let tokens = tokenize(input); + const input = Input.InClass(`private Dictionary< List, Dictionary> _field;`); + const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public, - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("Tester"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Keywords.Modifiers.Private, Tokens.Type("Dictionary"), Tokens.Puncuation.TypeParameters.Begin, @@ -78,30 +58,20 @@ public class Tester Tokens.Puncuation.TypeParameters.End, Tokens.Puncuation.TypeParameters.End, Tokens.Identifiers.FieldName("_field"), - Tokens.Puncuation.Semicolon, - - Tokens.Puncuation.CurlyBrace.Close]); + Tokens.Puncuation.Semicolon]); }); it("modifiers", () => { - const input = ` -public class Tester -{ - private static readonly List _field; - readonly string _field2; - string _field3; -}`; + const input = Input.InClass(` +private static readonly List _field; +readonly string _field2; +string _field3;`); - let tokens = tokenize(input); + const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public, - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("Tester"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Keywords.Modifiers.Private, Tokens.Keywords.Modifiers.Static, Tokens.Keywords.Modifiers.ReadOnly, @@ -116,28 +86,18 @@ public class Tester Tokens.Type("string"), Tokens.Identifiers.FieldName("_field3"), - Tokens.Puncuation.Semicolon, - 
- Tokens.Puncuation.CurlyBrace.Close]); + Tokens.Puncuation.Semicolon]); }); it("types", () => { - const input = ` -public class Tester -{ - string field123; - string[] field123; -}`; + const input = Input.InClass(` +string field123; +string[] field123;`); - let tokens = tokenize(input); + const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public, - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("Tester"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Type("string"), Tokens.Identifiers.FieldName("field123"), Tokens.Puncuation.Semicolon, @@ -146,28 +106,18 @@ public class Tester Tokens.Puncuation.SquareBracket.Open, Tokens.Puncuation.SquareBracket.Close, Tokens.Identifiers.FieldName("field123"), - Tokens.Puncuation.Semicolon, - - Tokens.Puncuation.CurlyBrace.Close]); + Tokens.Puncuation.Semicolon]); }); it("assignment", () => { - const input = ` -public class Tester -{ - private string field = "hello"; - const bool field = true; -}`; + const input = Input.InClass(` +private string field = "hello"; +const bool field = true;`); let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public, - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("Tester"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Keywords.Modifiers.Private, Tokens.Type("string"), Tokens.Identifiers.FieldName("field"), @@ -182,27 +132,15 @@ public class Tester Tokens.Identifiers.FieldName("field"), Tokens.Operators.Assignment, Tokens.Literals.Boolean.True, - Tokens.Puncuation.Semicolon, - - Tokens.Puncuation.CurlyBrace.Close]); + Tokens.Puncuation.Semicolon]); }); it("declaration with multiple declarators", () => { - const input = ` -public class Tester -{ - int x = 19, y = 23, z = 42; -}`; - - let tokens = tokenize(input); + const input = Input.InClass(`int x = 19, y = 23, z = 42;`); + const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public, - Tokens.Keywords.Class, - 
Tokens.Identifiers.ClassName("Tester"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Type("int"), Tokens.Identifiers.FieldName("x"), Tokens.Operators.Assignment, @@ -215,9 +153,7 @@ public class Tester Tokens.Identifiers.FieldName("z"), Tokens.Operators.Assignment, Tokens.Literals.Numeric.Decimal("42"), - Tokens.Puncuation.Semicolon, - - Tokens.Puncuation.CurlyBrace.Close]); + Tokens.Puncuation.Semicolon]); }); }); }); diff --git a/test/syntaxes/interation-statements.test.syntax.ts b/test/syntaxes/interation-statements.test.syntax.ts index 4a98903094..823d7ff2fd 100644 --- a/test/syntaxes/interation-statements.test.syntax.ts +++ b/test/syntaxes/interation-statements.test.syntax.ts @@ -12,25 +12,11 @@ describe("Grammar", () => { describe.skip("Iteration statements (loops)", () => { it("single-line declaration with no parameters", () => { - const input = Input.InMethod(` -while (true) { } -`); - - let tokens = tokenize(input); + const input = Input.InMethod(`while (true) { }`); + const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("Tester"), - Tokens.Puncuation.CurlyBrace.Open, - - Tokens.Type("void"), - Tokens.Identifiers.MethodName("Foo"), - Tokens.Puncuation.Parenthesis.Open, - Tokens.Puncuation.Parenthesis.Close, - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Puncuation.CurlyBrace.Close, - - Tokens.Puncuation.CurlyBrace.Close]); + ]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/interfaces.test.syntax.ts b/test/syntaxes/interfaces.test.syntax.ts index 0de88364a9..a0152bf643 100644 --- a/test/syntaxes/interfaces.test.syntax.ts +++ b/test/syntaxes/interfaces.test.syntax.ts @@ -12,11 +12,8 @@ describe("Grammar", () => { describe("Interfaces", () => { it("simple interface", () => { - const input = ` -interface IFoo { } -`; - - let tokens = tokenize(input); + const input = `interface IFoo { }`; + const tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Interface, @@ 
-32,7 +29,7 @@ interface IFoo { } interface IBar : IFoo { } `; - let tokens = tokenize(input); + const tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Interface, @@ -49,11 +46,8 @@ interface IBar : IFoo { } it("generic interface", () => { - const input = ` -interface IFoo { } -`; - - let tokens = tokenize(input); + const input = `interface IFoo { }`; + const tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Interface, @@ -64,11 +58,8 @@ interface IFoo { } it("generic interface with variance", () => { - const input = ` -interface IFoo { } -`; - - let tokens = tokenize(input); + const input = `interface IFoo { }`; + const tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Interface, @@ -79,11 +70,8 @@ interface IFoo { } it("generic interface with constraints", () => { - const input = ` -interface IFoo where T1 : T2 { } -`; - - let tokens = tokenize(input); + const input = `interface IFoo where T1 : T2 { }`; + const tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Interface, diff --git a/test/syntaxes/interpolated-strings.test.syntax.ts b/test/syntaxes/interpolated-strings.test.syntax.ts index 51da3c80ab..da401d6129 100644 --- a/test/syntaxes/interpolated-strings.test.syntax.ts +++ b/test/syntaxes/interpolated-strings.test.syntax.ts @@ -4,7 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { should } from 'chai'; -import { tokenize, Tokens } from './utils/tokenizer'; +import { tokenize, Input, Tokens } from './utils/tokenizer'; describe("Grammar", () => { before(() => should()); @@ -12,20 +12,10 @@ describe("Grammar", () => { describe("Interpolated strings", () => { it("two interpolations", () => { - const input = ` -public class Tester -{ - string test = $"hello {one} world {two}!"; -}`; - - let tokens = tokenize(input); + const input = Input.InClass(`string test = $"hello {one} world {two}!";`); + const tokens = tokenize(input); 
tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public, - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("Tester"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Type("string"), Tokens.Identifiers.FieldName("test"), Tokens.Operators.Assignment, @@ -40,81 +30,45 @@ public class Tester Tokens.Puncuation.Interpolation.End, Tokens.Literals.String("!"), Tokens.Puncuation.InterpolatedString.End, - Tokens.Puncuation.Semicolon, - - Tokens.Puncuation.CurlyBrace.Close]); + Tokens.Puncuation.Semicolon]); }); it("no interpolations", () => { - const input = ` -public class Tester -{ - string test = $"hello world!"; -}`; - - let tokens = tokenize(input); + const input = Input.InClass(`string test = $"hello world!";`); + const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public, - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("Tester"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Type("string"), Tokens.Identifiers.FieldName("test"), Tokens.Operators.Assignment, Tokens.Puncuation.InterpolatedString.Begin, Tokens.Literals.String("hello world!"), Tokens.Puncuation.InterpolatedString.End, - Tokens.Puncuation.Semicolon, - - Tokens.Puncuation.CurlyBrace.Close]); + Tokens.Puncuation.Semicolon]); }); it("no interpolations due to escaped braces", () => { - const input = ` -public class Tester -{ - string test = $"hello {{one}} world {{two}}!"; -}`; - - let tokens = tokenize(input); + const input = Input.InClass(`string test = $"hello {{one}} world {{two}}!";`); + const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public, - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("Tester"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Type("string"), Tokens.Identifiers.FieldName("test"), Tokens.Operators.Assignment, Tokens.Puncuation.InterpolatedString.Begin, Tokens.Literals.String("hello {{one}} world {{two}}!"), Tokens.Puncuation.InterpolatedString.End, - Tokens.Puncuation.Semicolon, - - 
Tokens.Puncuation.CurlyBrace.Close]); + Tokens.Puncuation.Semicolon]); }); it("two interpolations with escaped braces", () => { - const input = ` -public class Tester -{ - string test = $"hello {{{one}}} world {{{two}}}!"; -}`; - - let tokens = tokenize(input); + const input = Input.InClass(`string test = $"hello {{{one}}} world {{{two}}}!";`); + const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public, - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("Tester"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Type("string"), Tokens.Identifiers.FieldName("test"), Tokens.Operators.Assignment, @@ -131,55 +85,32 @@ public class Tester Tokens.Puncuation.Interpolation.End, Tokens.Literals.String("}}!"), Tokens.Puncuation.InterpolatedString.End, - Tokens.Puncuation.Semicolon, - - Tokens.Puncuation.CurlyBrace.Close]); + Tokens.Puncuation.Semicolon]); }); it("no interpolations due to double-escaped braces", () => { - const input = ` -public class Tester -{ - string test = $"hello {{{{one}}}} world {{{{two}}}}!"; -}`; - - let tokens = tokenize(input); + const input = Input.InClass(`string test = $"hello {{{{one}}}} world {{{{two}}}}!";`); + const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public, - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("Tester"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Type("string"), Tokens.Identifiers.FieldName("test"), Tokens.Operators.Assignment, Tokens.Puncuation.InterpolatedString.Begin, Tokens.Literals.String("hello {{{{one}}}} world {{{{two}}}}!"), Tokens.Puncuation.InterpolatedString.End, - Tokens.Puncuation.Semicolon, - - Tokens.Puncuation.CurlyBrace.Close]); + Tokens.Puncuation.Semicolon]); }); it("break across two lines (non-verbatim)", () => { - const input = ` -public class Tester -{ - string test = $"hello -world!"; -}`; - - let tokens = tokenize(input); + const input = Input.InClass(` +string test = $"hello +world!";`); + const tokens = 
tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public, - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("Tester"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Type("string"), Tokens.Identifiers.FieldName("test"), Tokens.Operators.Assignment, @@ -195,20 +126,10 @@ world!"; it("verbatim with two interpolations", () => { - const input = ` -public class Tester -{ - string test = $@"hello {one} world {two}!"; -}`; - - let tokens = tokenize(input); + const input = Input.InClass(`string test = $@"hello {one} world {two}!";`); + const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public, - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("Tester"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Type("string"), Tokens.Identifiers.FieldName("test"), Tokens.Operators.Assignment, @@ -223,28 +144,17 @@ public class Tester Tokens.Puncuation.Interpolation.End, Tokens.Literals.String("!"), Tokens.Puncuation.InterpolatedString.End, - Tokens.Puncuation.Semicolon, - - Tokens.Puncuation.CurlyBrace.Close]); + Tokens.Puncuation.Semicolon]); }); it("break across two lines with two interpolations (verbatim)", () => { - const input = ` -public class Tester -{ - string test = $@"hello {one} - world {two}!"; -}`; - - let tokens = tokenize(input); + const input = Input.InClass(` +string test = $@"hello {one} +world {two}!";`); + const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public, - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("Tester"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Type("string"), Tokens.Identifiers.FieldName("test"), Tokens.Operators.Assignment, @@ -253,44 +163,31 @@ public class Tester Tokens.Puncuation.Interpolation.Begin, Tokens.Variables.ReadWrite("one"), Tokens.Puncuation.Interpolation.End, - Tokens.Literals.String(" world "), + Tokens.Literals.String("world "), Tokens.Puncuation.Interpolation.Begin, Tokens.Variables.ReadWrite("two"), 
Tokens.Puncuation.Interpolation.End, Tokens.Literals.String("!"), Tokens.Puncuation.InterpolatedString.End, - Tokens.Puncuation.Semicolon, - - Tokens.Puncuation.CurlyBrace.Close]); + Tokens.Puncuation.Semicolon]); }); it("break across two lines with no interpolations (verbatim)", () => { - const input = ` -public class Tester -{ - string test = $@"hello - world!"; -}`; - - let tokens = tokenize(input); + const input = Input.InClass(` +string test = $@"hello +world!";`); + const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public, - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("Tester"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Type("string"), Tokens.Identifiers.FieldName("test"), Tokens.Operators.Assignment, Tokens.Puncuation.InterpolatedString.VerbatimBegin, Tokens.Literals.String("hello"), - Tokens.Literals.String(" world!"), + Tokens.Literals.String("world!"), Tokens.Puncuation.InterpolatedString.End, - Tokens.Puncuation.Semicolon, - - Tokens.Puncuation.CurlyBrace.Close]); + Tokens.Puncuation.Semicolon]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/methods.test.syntax.ts b/test/syntaxes/methods.test.syntax.ts index 9059c8e1c3..3a4808fba6 100644 --- a/test/syntaxes/methods.test.syntax.ts +++ b/test/syntaxes/methods.test.syntax.ts @@ -4,7 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { should } from 'chai'; -import { tokenize, Tokens } from './utils/tokenizer'; +import { tokenize, Input, Tokens } from './utils/tokenizer'; describe("Grammar", () => { before(() => should()); @@ -12,45 +12,28 @@ describe("Grammar", () => { describe("Methods", () => { it("single-line declaration with no parameters", () => { - const input = ` -class Tester -{ - void Foo() { } -}`; - let tokens = tokenize(input); + const input = Input.InClass(`void Foo() { }`); + const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Class, - 
Tokens.Identifiers.ClassName("Tester"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Type("void"), Tokens.Identifiers.MethodName("Foo"), Tokens.Puncuation.Parenthesis.Open, Tokens.Puncuation.Parenthesis.Close, Tokens.Puncuation.CurlyBrace.Open, - Tokens.Puncuation.CurlyBrace.Close, - Tokens.Puncuation.CurlyBrace.Close]); }); it("declaration with two parameters", () => { - const input = ` -class Tester + const input = Input.InClass(` +int Add(int x, int y) { - int Add(int x, int y) - { - return x + y; - } -}`; - let tokens = tokenize(input); + return x + y; +}`); + const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("Tester"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Type("int"), Tokens.Identifiers.MethodName("Add"), Tokens.Puncuation.Parenthesis.Open, @@ -66,25 +49,15 @@ class Tester Tokens.Operators.Arithmetic.Addition, Tokens.Variables.ReadWrite("y"), Tokens.Puncuation.Semicolon, - Tokens.Puncuation.CurlyBrace.Close, - Tokens.Puncuation.CurlyBrace.Close]); }); it("expression body", () => { - const input = ` -class Tester -{ - int Add(int x, int y) => x + y; -}`; - let tokens = tokenize(input); + const input = Input.InClass(`int Add(int x, int y) => x + y;`); + const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("Tester"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Type("int"), Tokens.Identifiers.MethodName("Add"), Tokens.Puncuation.Parenthesis.Open, @@ -98,9 +71,7 @@ class Tester Tokens.Variables.ReadWrite("x"), Tokens.Operators.Arithmetic.Addition, Tokens.Variables.ReadWrite("y"), - Tokens.Puncuation.Semicolon, - - Tokens.Puncuation.CurlyBrace.Close]); + Tokens.Puncuation.Semicolon]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/numeric-literals.test.syntax.ts b/test/syntaxes/numeric-literals.test.syntax.ts index f7f3137ba8..8e9ce7b84b 100644 --- a/test/syntaxes/numeric-literals.test.syntax.ts +++ 
b/test/syntaxes/numeric-literals.test.syntax.ts @@ -4,7 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { should } from 'chai'; -import { tokenize, Tokens } from './utils/tokenizer'; +import { tokenize, Input, Tokens } from './utils/tokenizer'; describe("Grammar", () => { before(() => should()); @@ -12,94 +12,54 @@ describe("Grammar", () => { describe("Literals - numeric", () => { it("decimal zero", () => { - const input = ` -class C { - int x = 0; -}`; - - let tokens = tokenize(input); + const input = Input.InClass(`int x = 0;`); + const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("C"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Type("int"), Tokens.Identifiers.FieldName("x"), Tokens.Operators.Assignment, Tokens.Literals.Numeric.Decimal("0"), - Tokens.Puncuation.Semicolon, - - Tokens.Puncuation.CurlyBrace.Close]); + Tokens.Puncuation.Semicolon]); }); it("hexadecimal zero", () => { - const input = ` -class C { - int x = 0x0; -}`; - - let tokens = tokenize(input); + const input = Input.InClass(`int x = 0x0;`); + const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("C"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Type("int"), Tokens.Identifiers.FieldName("x"), Tokens.Operators.Assignment, Tokens.Literals.Numeric.Hexadecimal("0x0"), - Tokens.Puncuation.Semicolon, - - Tokens.Puncuation.CurlyBrace.Close]); + Tokens.Puncuation.Semicolon]); }); it("binary zero", () => { - const input = ` -class C { - int x = 0b0; -}`; - - let tokens = tokenize(input); + const input = Input.InClass(`int x = 0b0;`); + const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("C"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Type("int"), Tokens.Identifiers.FieldName("x"), Tokens.Operators.Assignment, Tokens.Literals.Numeric.Binary("0b0"), - 
Tokens.Puncuation.Semicolon, - - Tokens.Puncuation.CurlyBrace.Close]); + Tokens.Puncuation.Semicolon]); }); it("floating-point zero", () => { - const input = ` -class C { - float x = 0.0; -}`; - - let tokens = tokenize(input); + const input = Input.InClass(`float x = 0.0;`); + const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("C"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Type("float"), Tokens.Identifiers.FieldName("x"), Tokens.Operators.Assignment, Tokens.Literals.Numeric.Decimal("0.0"), - Tokens.Puncuation.Semicolon, - - Tokens.Puncuation.CurlyBrace.Close]); + Tokens.Puncuation.Semicolon]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/properties.test.syntax.ts b/test/syntaxes/properties.test.syntax.ts index bfac4472ff..a35b797e43 100644 --- a/test/syntaxes/properties.test.syntax.ts +++ b/test/syntaxes/properties.test.syntax.ts @@ -4,7 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { should } from 'chai'; -import { tokenize, Tokens } from './utils/tokenizer'; +import { tokenize, Input, Tokens } from './utils/tokenizer'; describe("Grammar", () => { before(() => should()); @@ -12,22 +12,15 @@ describe("Grammar", () => { describe("Property", () => { it("declaration", () => { - const input = ` -class Tester + const input = Input.InClass(` +public IBooom Property { - public IBooom Property - { - get { return null; } - set { something = value; } - } -}`; - let tokens = tokenize(input); + get { return null; } + set { something = value; } +}`); + const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("Tester"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Keywords.Modifiers.Public, Tokens.Type("IBooom"), Tokens.Identifiers.PropertyName("Property"), @@ -45,25 +38,15 @@ class Tester Tokens.Variables.ReadWrite("value"), Tokens.Puncuation.Semicolon, 
Tokens.Puncuation.CurlyBrace.Close, - Tokens.Puncuation.CurlyBrace.Close, - Tokens.Puncuation.CurlyBrace.Close]); }); it("declaration single line", () => { - const input = ` -class Tester -{ - public IBooom Property { get { return null; } private set { something = value; } } -}`; - let tokens = tokenize(input); + const input = Input.InClass(`public IBooom Property { get { return null; } private set { something = value; } }`); + const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("Tester"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Keywords.Modifiers.Public, Tokens.Type("IBooom"), Tokens.Identifiers.PropertyName("Property"), @@ -82,25 +65,15 @@ class Tester Tokens.Variables.ReadWrite("value"), Tokens.Puncuation.Semicolon, Tokens.Puncuation.CurlyBrace.Close, - Tokens.Puncuation.CurlyBrace.Close, - Tokens.Puncuation.CurlyBrace.Close]); }); it("declaration without modifiers", () => { - const input = ` -class Tester -{ - IBooom Property {get; set;} -}`; - let tokens = tokenize(input); + const input = Input.InClass(`IBooom Property {get; set;}`); + const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("Tester"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Type("IBooom"), Tokens.Identifiers.PropertyName("Property"), Tokens.Puncuation.CurlyBrace.Open, @@ -108,25 +81,15 @@ class Tester Tokens.Puncuation.Semicolon, Tokens.Keywords.Set, Tokens.Puncuation.Semicolon, - Tokens.Puncuation.CurlyBrace.Close, - Tokens.Puncuation.CurlyBrace.Close]); }); it("auto-property single line", function () { - const input = ` -class Tester -{ - public IBooom Property { get; set; } -}`; - let tokens = tokenize(input); + const input = Input.InClass(`public IBooom Property { get; set; }`); + const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("Tester"), - Tokens.Puncuation.CurlyBrace.Open, - 
Tokens.Keywords.Modifiers.Public, Tokens.Type("IBooom"), Tokens.Identifiers.PropertyName("Property"), @@ -135,25 +98,15 @@ class Tester Tokens.Puncuation.Semicolon, Tokens.Keywords.Set, Tokens.Puncuation.Semicolon, - Tokens.Puncuation.CurlyBrace.Close, - Tokens.Puncuation.CurlyBrace.Close]); }); it("auto-property single line (protected internal)", function () { - const input = ` -class Tester -{ - protected internal IBooom Property { get; set; } -}`; - let tokens = tokenize(input); + const input = Input.InClass(`protected internal IBooom Property { get; set; }`); + const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("Tester"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Keywords.Modifiers.Protected, Tokens.Keywords.Modifiers.Internal, Tokens.Type("IBooom"), @@ -163,29 +116,20 @@ class Tester Tokens.Puncuation.Semicolon, Tokens.Keywords.Set, Tokens.Puncuation.Semicolon, - Tokens.Puncuation.CurlyBrace.Close, - Tokens.Puncuation.CurlyBrace.Close]); }); it("auto-property", () => { - const input = ` -class Tester + const input = Input.InClass(` +public IBooom Property { - public IBooom Property - { - get; - set; - } -}`; - let tokens = tokenize(input); + get; + set; +}`); + const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("Tester"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Keywords.Modifiers.Public, Tokens.Type("IBooom"), Tokens.Identifiers.PropertyName("Property"), @@ -194,25 +138,15 @@ class Tester Tokens.Puncuation.Semicolon, Tokens.Keywords.Set, Tokens.Puncuation.Semicolon, - Tokens.Puncuation.CurlyBrace.Close, - Tokens.Puncuation.CurlyBrace.Close]); }); it("generic auto-property", () => { - const input = ` -class Tester -{ - public Dictionary[]> Property { get; set; } -}`; - let tokens = tokenize(input); + const input = Input.InClass(`public Dictionary[]> Property { get; set; }`); + const tokens = tokenize(input); 
tokens.should.deep.equal([ - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("Tester"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Keywords.Modifiers.Public, Tokens.Type("Dictionary"), Tokens.Puncuation.TypeParameters.Begin, @@ -231,26 +165,15 @@ class Tester Tokens.Puncuation.Semicolon, Tokens.Keywords.Set, Tokens.Puncuation.Semicolon, - Tokens.Puncuation.CurlyBrace.Close, - Tokens.Puncuation.CurlyBrace.Close]); }); it("auto-property initializer", () => { - const input = ` -class Tester -{ - public Dictionary[]> Property { get; } = new Dictionary[]>(); -}`; - - let tokens = tokenize(input); + const input = Input.InClass(`public Dictionary[]> Property { get; } = new Dictionary[]>();`); + const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("Tester"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Keywords.Modifiers.Public, Tokens.Type("Dictionary"), Tokens.Puncuation.TypeParameters.Begin, @@ -283,28 +206,17 @@ class Tester Tokens.Puncuation.TypeParameters.End, Tokens.Puncuation.Parenthesis.Open, Tokens.Puncuation.Parenthesis.Close, - Tokens.Puncuation.Semicolon, - - Tokens.Puncuation.CurlyBrace.Close]); + Tokens.Puncuation.Semicolon]); }); it("expression body", () => { - const input = ` -public class Tester -{ - private string prop1 => "hello"; - private bool prop2 => true; -}`; - - let tokens = tokenize(input); + const input = Input.InClass(` +private string prop1 => "hello"; +private bool prop2 => true;`); + const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public, - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("Tester"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Keywords.Modifiers.Private, Tokens.Type("string"), Tokens.Identifiers.PropertyName("prop1"), @@ -319,9 +231,7 @@ public class Tester Tokens.Identifiers.PropertyName("prop2"), Tokens.Operators.Arrow, Tokens.Literals.Boolean.True, - Tokens.Puncuation.Semicolon, - - 
Tokens.Puncuation.CurlyBrace.Close]); + Tokens.Puncuation.Semicolon]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/structs.test.syntax.ts b/test/syntaxes/structs.test.syntax.ts index b3dc6d74e6..ae4e1eea0b 100644 --- a/test/syntaxes/structs.test.syntax.ts +++ b/test/syntaxes/structs.test.syntax.ts @@ -12,11 +12,8 @@ describe("Grammar", () => { describe("Structs", () => { it("simple struct", () => { - const input = ` -struct S { } -`; - - let tokens = tokenize(input); + const input = `struct S { }`; + const tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Struct, @@ -31,8 +28,7 @@ struct S { } interface IFoo { } struct S : IFoo { } `; - - let tokens = tokenize(input); + const tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Interface, @@ -52,8 +48,7 @@ struct S : IFoo { } const input = ` struct S { } `; - - let tokens = tokenize(input); + const tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Struct, @@ -67,8 +62,7 @@ struct S { } const input = ` struct S where T1 : T2 { } `; - - let tokens = tokenize(input); + const tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Struct, diff --git a/test/syntaxes/using-directives.test.syntax.ts b/test/syntaxes/using-directives.test.syntax.ts index b8b32411ad..070e4c5a59 100644 --- a/test/syntaxes/using-directives.test.syntax.ts +++ b/test/syntaxes/using-directives.test.syntax.ts @@ -12,10 +12,8 @@ describe("Grammar", () => { describe("Using directives", () => { it("using namespace", () => { - const input = ` -using System;`; - - let tokens = tokenize(input); + const input = `using System;`; + const tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Using, @@ -25,10 +23,8 @@ using System;`; it("using static type", () => { - const input = ` -using static System.Console;`; - - let tokens = tokenize(input); + const input = `using static System.Console;`; + const tokens = tokenize(input); tokens.should.deep.equal([ 
Tokens.Keywords.Using, @@ -41,10 +37,8 @@ using static System.Console;`; it("namespace alias", () => { - const input = ` -using S = System;`; - - let tokens = tokenize(input); + const input = `using S = System;`; + const tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Using, @@ -56,10 +50,8 @@ using S = System;`; it("type alias", () => { - const input = ` -using C = System.Console;`; - - let tokens = tokenize(input); + const input = `using C = System.Console;`; + const tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Using, @@ -73,10 +65,8 @@ using C = System.Console;`; it("type alias with generic type", () => { - const input = ` -using IntList = System.Collections.Generic.List;`; - - let tokens = tokenize(input); + const input = `using IntList = System.Collections.Generic.List;`; + const tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Using, @@ -99,10 +89,8 @@ using IntList = System.Collections.Generic.List;`; it("type alias with nested generic types", () => { - const input = ` -using X = System.Collections.Generic.Dictionary>;`; - - let tokens = tokenize(input); + const input = `using X = System.Collections.Generic.Dictionary>;`; + const tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Using, @@ -138,10 +126,8 @@ using X = System.Collections.Generic.Dictionary { - const input = ` -using X =/**/Dictionary/**//**/>/**/;//end`; - - let tokens = tokenize(input); + const input = `using X =/**/Dictionary/**//**/>/**/;//end`; + const tokens = tokenize(input); tokens.should.deep.equal([ Tokens.Keywords.Using, diff --git a/test/syntaxes/utils/tokenizer.ts b/test/syntaxes/utils/tokenizer.ts index cbc52b8122..436d0ad5ec 100644 --- a/test/syntaxes/utils/tokenizer.ts +++ b/test/syntaxes/utils/tokenizer.ts @@ -68,7 +68,7 @@ export class Input { public static InClass(input: string) { let text = ` -class Tester { +class TestClass { ${input} }`; @@ -81,8 +81,8 @@ class Tester { public static 
InMethod(input: string) { let text = ` -class Tester { - void M() { +class TestClass { + void TestMethod() { ${input} } }`; @@ -93,6 +93,19 @@ class Tester { return new Input(lines, new Span(3, 8, lines.length - 2, 0)); } + + public static InNamespace(input: string) { + let text = ` +namespace TestNamespace { + ${input} +}`; + + // ensure consistent line-endings irrelevant of OS + text = text.replace('\r\n', '\n'); + let lines = text.split('\n'); + + return new Input(lines, new Span(2, 4, lines.length - 1, 0)); + } } export class Token { From 20aa1f56715c30138c87c7e3f0b0773efa6db925 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Wed, 28 Dec 2016 13:31:10 -0800 Subject: [PATCH 041/192] Rename csharp2.json to csharp-new.json --- syntaxes/{csharp2.json => csharp-new.json} | 0 test/syntaxes/utils/tokenizer.ts | 2 +- 2 files changed, 1 insertion(+), 1 deletion(-) rename syntaxes/{csharp2.json => csharp-new.json} (100%) diff --git a/syntaxes/csharp2.json b/syntaxes/csharp-new.json similarity index 100% rename from syntaxes/csharp2.json rename to syntaxes/csharp-new.json diff --git a/test/syntaxes/utils/tokenizer.ts b/test/syntaxes/utils/tokenizer.ts index 436d0ad5ec..23a53ba49b 100644 --- a/test/syntaxes/utils/tokenizer.ts +++ b/test/syntaxes/utils/tokenizer.ts @@ -6,7 +6,7 @@ import { ITokenizeLineResult, Registry, StackElement } from 'vscode-textmate'; const registry = new Registry(); -const grammar = registry.loadGrammarFromPathSync('syntaxes/csharp2.json'); +const grammar = registry.loadGrammarFromPathSync('syntaxes/csharp-new.json'); const excludedTypes = ['source.cs', 'meta.interpolation.cs', 'meta.type.parameters.cs'] export function tokenize(input: string | Input, excludeTypes: boolean = true): Token[] { From 9f90808dc02626a2f16215cda17b289b2e31a927 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Wed, 28 Dec 2016 14:16:45 -0800 Subject: [PATCH 042/192] Further test clean up --- test/syntaxes/attributes.test.syntax.ts | 178 +++--- 
test/syntaxes/boolean-literals.test.syntax.ts | 22 +- test/syntaxes/classes.test.syntax.ts | 326 +++++------ test/syntaxes/comments.test.syntax.ts | 18 +- test/syntaxes/delegates.test.syntax.ts | 88 +-- test/syntaxes/enums.test.syntax.ts | 72 +-- test/syntaxes/events.test.syntax.ts | 98 ++-- test/syntaxes/extern-aliases.test.syntax.ts | 18 +- test/syntaxes/fields.test.syntax.ts | 168 +++--- test/syntaxes/indexers.test.syntax.ts | 34 +- .../interation-statements.test.syntax.ts | 2 +- test/syntaxes/interfaces.test.syntax.ts | 62 +- .../interpolated-strings.test.syntax.ts | 202 +++---- test/syntaxes/methods.test.syntax.ts | 74 +-- test/syntaxes/namespaces.test.syntax.ts | 130 ++-- test/syntaxes/numeric-literals.test.syntax.ts | 42 +- test/syntaxes/properties.test.syntax.ts | 282 ++++----- test/syntaxes/structs.test.syntax.ts | 54 +- test/syntaxes/using-directives.test.syntax.ts | 204 +++---- .../utils/{tokenizer.ts => tokenize.ts} | 553 +++++++++--------- 20 files changed, 1312 insertions(+), 1315 deletions(-) rename test/syntaxes/utils/{tokenizer.ts => tokenize.ts} (91%) diff --git a/test/syntaxes/attributes.test.syntax.ts b/test/syntaxes/attributes.test.syntax.ts index cac20ee72b..3a44f464c7 100644 --- a/test/syntaxes/attributes.test.syntax.ts +++ b/test/syntaxes/attributes.test.syntax.ts @@ -4,7 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { should } from 'chai'; -import { tokenize, Tokens } from './utils/tokenizer'; +import { tokenize, Token } from './utils/tokenize'; describe("Grammar", () => { before(() => should()); @@ -16,9 +16,9 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Puncuation.SquareBracket.Open, - Tokens.Type("Foo"), - Tokens.Puncuation.SquareBracket.Close]); + Token.Puncuation.SquareBracket.Open, + Token.Type("Foo"), + Token.Puncuation.SquareBracket.Close]); }); it("global attribute with specifier", () => { @@ -27,11 +27,11 @@ 
describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Puncuation.SquareBracket.Open, - Tokens.Keywords.AttributeSpecifier("assembly"), - Tokens.Puncuation.Colon, - Tokens.Type("Foo"), - Tokens.Puncuation.SquareBracket.Close]); + Token.Puncuation.SquareBracket.Open, + Token.Keywords.AttributeSpecifier("assembly"), + Token.Puncuation.Colon, + Token.Type("Foo"), + Token.Puncuation.SquareBracket.Close]); }); it("Two global attributes in same section with specifier", () => { @@ -40,13 +40,13 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Puncuation.SquareBracket.Open, - Tokens.Keywords.AttributeSpecifier("module"), - Tokens.Puncuation.Colon, - Tokens.Type("Foo"), - Tokens.Puncuation.Comma, - Tokens.Type("Bar"), - Tokens.Puncuation.SquareBracket.Close]); + Token.Puncuation.SquareBracket.Open, + Token.Keywords.AttributeSpecifier("module"), + Token.Puncuation.Colon, + Token.Type("Foo"), + Token.Puncuation.Comma, + Token.Type("Bar"), + Token.Puncuation.SquareBracket.Close]); }); it("Two global attributes in same section with specifier and empty argument lists", () => { @@ -55,17 +55,17 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Puncuation.SquareBracket.Open, - Tokens.Keywords.AttributeSpecifier("module"), - Tokens.Puncuation.Colon, - Tokens.Type("Foo"), - Tokens.Puncuation.Parenthesis.Open, - Tokens.Puncuation.Parenthesis.Close, - Tokens.Puncuation.Comma, - Tokens.Type("Bar"), - Tokens.Puncuation.Parenthesis.Open, - Tokens.Puncuation.Parenthesis.Close, - Tokens.Puncuation.SquareBracket.Close]); + Token.Puncuation.SquareBracket.Open, + Token.Keywords.AttributeSpecifier("module"), + Token.Puncuation.Colon, + Token.Type("Foo"), + Token.Puncuation.Parenthesis.Open, + Token.Puncuation.Parenthesis.Close, + Token.Puncuation.Comma, + Token.Type("Bar"), + Token.Puncuation.Parenthesis.Open, + Token.Puncuation.Parenthesis.Close, + 
Token.Puncuation.SquareBracket.Close]); }); it("Global attribute with one argument", () => { @@ -74,12 +74,12 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Puncuation.SquareBracket.Open, - Tokens.Type("Foo"), - Tokens.Puncuation.Parenthesis.Open, - Tokens.Literals.Boolean.True, - Tokens.Puncuation.Parenthesis.Close, - Tokens.Puncuation.SquareBracket.Close]); + Token.Puncuation.SquareBracket.Open, + Token.Type("Foo"), + Token.Puncuation.Parenthesis.Open, + Token.Literals.Boolean.True, + Token.Puncuation.Parenthesis.Close, + Token.Puncuation.SquareBracket.Close]); }); it("Global attribute with two arguments", () => { @@ -88,14 +88,14 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Puncuation.SquareBracket.Open, - Tokens.Type("Foo"), - Tokens.Puncuation.Parenthesis.Open, - Tokens.Literals.Boolean.True, - Tokens.Puncuation.Comma, - Tokens.Literals.Numeric.Decimal("42"), - Tokens.Puncuation.Parenthesis.Close, - Tokens.Puncuation.SquareBracket.Close]); + Token.Puncuation.SquareBracket.Open, + Token.Type("Foo"), + Token.Puncuation.Parenthesis.Open, + Token.Literals.Boolean.True, + Token.Puncuation.Comma, + Token.Literals.Numeric.Decimal("42"), + Token.Puncuation.Parenthesis.Close, + Token.Puncuation.SquareBracket.Close]); }); it("Global attribute with three arguments", () => { @@ -104,18 +104,18 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Puncuation.SquareBracket.Open, - Tokens.Type("Foo"), - Tokens.Puncuation.Parenthesis.Open, - Tokens.Literals.Boolean.True, - Tokens.Puncuation.Comma, - Tokens.Literals.Numeric.Decimal("42"), - Tokens.Puncuation.Comma, - Tokens.Puncuation.String.Begin, - Tokens.Literals.String("text"), - Tokens.Puncuation.String.End, - Tokens.Puncuation.Parenthesis.Close, - Tokens.Puncuation.SquareBracket.Close]); + Token.Puncuation.SquareBracket.Open, + Token.Type("Foo"), + 
Token.Puncuation.Parenthesis.Open, + Token.Literals.Boolean.True, + Token.Puncuation.Comma, + Token.Literals.Numeric.Decimal("42"), + Token.Puncuation.Comma, + Token.Puncuation.String.Begin, + Token.Literals.String("text"), + Token.Puncuation.String.End, + Token.Puncuation.Parenthesis.Close, + Token.Puncuation.SquareBracket.Close]); }); it("Global attribute with named argument", () => { @@ -124,14 +124,14 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Puncuation.SquareBracket.Open, - Tokens.Type("Foo"), - Tokens.Puncuation.Parenthesis.Open, - Tokens.Identifiers.PropertyName("Bar"), - Tokens.Operators.Assignment, - Tokens.Literals.Numeric.Decimal("42"), - Tokens.Puncuation.Parenthesis.Close, - Tokens.Puncuation.SquareBracket.Close]); + Token.Puncuation.SquareBracket.Open, + Token.Type("Foo"), + Token.Puncuation.Parenthesis.Open, + Token.Identifiers.PropertyName("Bar"), + Token.Operators.Assignment, + Token.Literals.Numeric.Decimal("42"), + Token.Puncuation.Parenthesis.Close, + Token.Puncuation.SquareBracket.Close]); }); it("Global attribute with one positional argument and one named argument", () => { @@ -140,16 +140,16 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Puncuation.SquareBracket.Open, - Tokens.Type("Foo"), - Tokens.Puncuation.Parenthesis.Open, - Tokens.Literals.Boolean.True, - Tokens.Puncuation.Comma, - Tokens.Identifiers.PropertyName("Bar"), - Tokens.Operators.Assignment, - Tokens.Literals.Numeric.Decimal("42"), - Tokens.Puncuation.Parenthesis.Close, - Tokens.Puncuation.SquareBracket.Close]); + Token.Puncuation.SquareBracket.Open, + Token.Type("Foo"), + Token.Puncuation.Parenthesis.Open, + Token.Literals.Boolean.True, + Token.Puncuation.Comma, + Token.Identifiers.PropertyName("Bar"), + Token.Operators.Assignment, + Token.Literals.Numeric.Decimal("42"), + Token.Puncuation.Parenthesis.Close, + Token.Puncuation.SquareBracket.Close]); }); it("Global 
attribute with specifier, one positional argument, and two named arguments", () => { @@ -158,24 +158,24 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Puncuation.SquareBracket.Open, - Tokens.Keywords.AttributeSpecifier("module"), - Tokens.Puncuation.Colon, - Tokens.Type("Foo"), - Tokens.Puncuation.Parenthesis.Open, - Tokens.Literals.Boolean.True, - Tokens.Puncuation.Comma, - Tokens.Identifiers.PropertyName("Bar"), - Tokens.Operators.Assignment, - Tokens.Literals.Numeric.Decimal("42"), - Tokens.Puncuation.Comma, - Tokens.Identifiers.PropertyName("Baz"), - Tokens.Operators.Assignment, - Tokens.Puncuation.String.Begin, - Tokens.Literals.String("hello"), - Tokens.Puncuation.String.End, - Tokens.Puncuation.Parenthesis.Close, - Tokens.Puncuation.SquareBracket.Close]); + Token.Puncuation.SquareBracket.Open, + Token.Keywords.AttributeSpecifier("module"), + Token.Puncuation.Colon, + Token.Type("Foo"), + Token.Puncuation.Parenthesis.Open, + Token.Literals.Boolean.True, + Token.Puncuation.Comma, + Token.Identifiers.PropertyName("Bar"), + Token.Operators.Assignment, + Token.Literals.Numeric.Decimal("42"), + Token.Puncuation.Comma, + Token.Identifiers.PropertyName("Baz"), + Token.Operators.Assignment, + Token.Puncuation.String.Begin, + Token.Literals.String("hello"), + Token.Puncuation.String.End, + Token.Puncuation.Parenthesis.Close, + Token.Puncuation.SquareBracket.Close]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/boolean-literals.test.syntax.ts b/test/syntaxes/boolean-literals.test.syntax.ts index 809686175f..0de9433e22 100644 --- a/test/syntaxes/boolean-literals.test.syntax.ts +++ b/test/syntaxes/boolean-literals.test.syntax.ts @@ -4,7 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { should } from 'chai'; -import { tokenize, Input, Tokens } from './utils/tokenizer'; +import { tokenize, Input, Token } from './utils/tokenize'; 
describe("Grammar", () => { before(() => should()); @@ -16,11 +16,11 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Type("bool"), - Tokens.Identifiers.FieldName("x"), - Tokens.Operators.Assignment, - Tokens.Literals.Boolean.True, - Tokens.Puncuation.Semicolon]); + Token.Type("bool"), + Token.Identifiers.FieldName("x"), + Token.Operators.Assignment, + Token.Literals.Boolean.True, + Token.Puncuation.Semicolon]); }); it("false", () => { @@ -29,11 +29,11 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Type("bool"), - Tokens.Identifiers.FieldName("x"), - Tokens.Operators.Assignment, - Tokens.Literals.Boolean.False, - Tokens.Puncuation.Semicolon]); + Token.Type("bool"), + Token.Identifiers.FieldName("x"), + Token.Operators.Assignment, + Token.Literals.Boolean.False, + Token.Puncuation.Semicolon]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/classes.test.syntax.ts b/test/syntaxes/classes.test.syntax.ts index 24236769fc..fee241060a 100644 --- a/test/syntaxes/classes.test.syntax.ts +++ b/test/syntaxes/classes.test.syntax.ts @@ -4,7 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { should } from 'chai'; -import { tokenize, Input, Tokens } from './utils/tokenizer'; +import { tokenize, Input, Token } from './utils/tokenize'; describe("Grammar", () => { before(() => should()); @@ -34,61 +34,61 @@ public abstract class PublicAbstractClass { } const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public, - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("PublicClass"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Puncuation.CurlyBrace.Close, - - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("DefaultClass"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Puncuation.CurlyBrace.Close, - - Tokens.Keywords.Modifiers.Internal, - Tokens.Keywords.Class, - 
Tokens.Identifiers.ClassName("InternalClass"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Puncuation.CurlyBrace.Close, - - Tokens.Keywords.Modifiers.Static, - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("DefaultStaticClass"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Puncuation.CurlyBrace.Close, - - Tokens.Keywords.Modifiers.Public, - Tokens.Keywords.Modifiers.Static, - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("PublicStaticClass"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Puncuation.CurlyBrace.Close, - - Tokens.Keywords.Modifiers.Sealed, - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("DefaultSealedClass"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Puncuation.CurlyBrace.Close, - - Tokens.Keywords.Modifiers.Public, - Tokens.Keywords.Modifiers.Sealed, - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("PublicSealedClass"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Puncuation.CurlyBrace.Close, - - Tokens.Keywords.Modifiers.Public, - Tokens.Keywords.Modifiers.Abstract, - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("PublicAbstractClass"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Puncuation.CurlyBrace.Close, - - Tokens.Keywords.Modifiers.Abstract, - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("DefaultAbstractClass"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Puncuation.CurlyBrace.Close]); + Token.Keywords.Modifiers.Public, + Token.Keywords.Class, + Token.Identifiers.ClassName("PublicClass"), + Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.CurlyBrace.Close, + + Token.Keywords.Class, + Token.Identifiers.ClassName("DefaultClass"), + Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.CurlyBrace.Close, + + Token.Keywords.Modifiers.Internal, + Token.Keywords.Class, + Token.Identifiers.ClassName("InternalClass"), + Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.CurlyBrace.Close, + + Token.Keywords.Modifiers.Static, + Token.Keywords.Class, + 
Token.Identifiers.ClassName("DefaultStaticClass"), + Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.CurlyBrace.Close, + + Token.Keywords.Modifiers.Public, + Token.Keywords.Modifiers.Static, + Token.Keywords.Class, + Token.Identifiers.ClassName("PublicStaticClass"), + Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.CurlyBrace.Close, + + Token.Keywords.Modifiers.Sealed, + Token.Keywords.Class, + Token.Identifiers.ClassName("DefaultSealedClass"), + Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.CurlyBrace.Close, + + Token.Keywords.Modifiers.Public, + Token.Keywords.Modifiers.Sealed, + Token.Keywords.Class, + Token.Identifiers.ClassName("PublicSealedClass"), + Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.CurlyBrace.Close, + + Token.Keywords.Modifiers.Public, + Token.Keywords.Modifiers.Abstract, + Token.Keywords.Class, + Token.Identifiers.ClassName("PublicAbstractClass"), + Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.CurlyBrace.Close, + + Token.Keywords.Modifiers.Abstract, + Token.Keywords.Class, + Token.Identifiers.ClassName("DefaultAbstractClass"), + Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.CurlyBrace.Close]); }); it("generics in identifier", () => { @@ -97,10 +97,10 @@ public abstract class PublicAbstractClass { } const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("Dictionary"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Puncuation.CurlyBrace.Close]); + Token.Keywords.Class, + Token.Identifiers.ClassName("Dictionary"), + Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.CurlyBrace.Close]); }); it("inheritance", () => { @@ -113,61 +113,61 @@ class PublicClass : Dictionary>, IMap"), - Tokens.Puncuation.Colon, - Tokens.Type("Root"), - Tokens.Puncuation.Accessor, - Tokens.Type("IInterface"), - Tokens.Puncuation.TypeParameters.Begin, - Tokens.Type("Something"), - Tokens.Puncuation.Accessor, - Tokens.Type("Nested"), - Tokens.Puncuation.TypeParameters.End, - 
Tokens.Puncuation.Comma, - Tokens.Type("Something"), - Tokens.Puncuation.Accessor, - Tokens.Type("IInterfaceTwo"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Puncuation.CurlyBrace.Close, - - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("PublicClass"), - Tokens.Puncuation.Colon, - Tokens.Type("Dictionary"), - Tokens.Puncuation.TypeParameters.Begin, - Tokens.Type("T"), - Tokens.Puncuation.Comma, - Tokens.Type("Dictionary"), - Tokens.Puncuation.TypeParameters.Begin, - Tokens.Type("string"), - Tokens.Puncuation.Comma, - Tokens.Type("string"), - Tokens.Puncuation.TypeParameters.End, - Tokens.Puncuation.TypeParameters.End, - Tokens.Puncuation.Comma, - Tokens.Type("IMap"), - Tokens.Puncuation.TypeParameters.Begin, - Tokens.Type("T"), - Tokens.Puncuation.Comma, - Tokens.Type("Dictionary"), - Tokens.Puncuation.TypeParameters.Begin, - Tokens.Type("string"), - Tokens.Puncuation.Comma, - Tokens.Type("string"), - Tokens.Puncuation.TypeParameters.End, - Tokens.Puncuation.TypeParameters.End, - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Puncuation.CurlyBrace.Close]); + Token.Keywords.Class, + Token.Identifiers.ClassName("PublicClass"), + Token.Puncuation.Colon, + Token.Type("IInterface"), + Token.Puncuation.Comma, + Token.Type("IInterfaceTwo"), + Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.CurlyBrace.Close, + + Token.Keywords.Class, + Token.Identifiers.ClassName("PublicClass"), + Token.Puncuation.Colon, + Token.Type("Root"), + Token.Puncuation.Accessor, + Token.Type("IInterface"), + Token.Puncuation.TypeParameters.Begin, + Token.Type("Something"), + Token.Puncuation.Accessor, + Token.Type("Nested"), + Token.Puncuation.TypeParameters.End, + Token.Puncuation.Comma, + Token.Type("Something"), + Token.Puncuation.Accessor, + Token.Type("IInterfaceTwo"), + Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.CurlyBrace.Close, + + Token.Keywords.Class, + Token.Identifiers.ClassName("PublicClass"), + Token.Puncuation.Colon, + Token.Type("Dictionary"), + 
Token.Puncuation.TypeParameters.Begin, + Token.Type("T"), + Token.Puncuation.Comma, + Token.Type("Dictionary"), + Token.Puncuation.TypeParameters.Begin, + Token.Type("string"), + Token.Puncuation.Comma, + Token.Type("string"), + Token.Puncuation.TypeParameters.End, + Token.Puncuation.TypeParameters.End, + Token.Puncuation.Comma, + Token.Type("IMap"), + Token.Puncuation.TypeParameters.Begin, + Token.Type("T"), + Token.Puncuation.Comma, + Token.Type("Dictionary"), + Token.Puncuation.TypeParameters.Begin, + Token.Type("string"), + Token.Puncuation.Comma, + Token.Type("string"), + Token.Puncuation.TypeParameters.End, + Token.Puncuation.TypeParameters.End, + Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.CurlyBrace.Close]); }); it("generic constraints", () => { @@ -182,45 +182,45 @@ class PublicClass : Dictionary[]>, ISomething const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("PublicClass"), - Tokens.Keywords.Where, - Tokens.Type("T"), - Tokens.Puncuation.Colon, - Tokens.Type("ISomething"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Puncuation.CurlyBrace.Close, - - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("PublicClass"), - Tokens.Puncuation.Colon, - Tokens.Type("Dictionary"), - Tokens.Puncuation.TypeParameters.Begin, - Tokens.Type("T"), - Tokens.Puncuation.Comma, - Tokens.Type("List"), - Tokens.Puncuation.TypeParameters.Begin, - Tokens.Type("string"), - Tokens.Puncuation.TypeParameters.End, - Tokens.Puncuation.SquareBracket.Open, - Tokens.Puncuation.SquareBracket.Close, - Tokens.Puncuation.TypeParameters.End, - Tokens.Puncuation.Comma, - Tokens.Type("ISomething"), - Tokens.Keywords.Where, - Tokens.Type("T"), - Tokens.Puncuation.Colon, - Tokens.Type("ICar"), - Tokens.Puncuation.Comma, - Tokens.Keywords.New, - Tokens.Puncuation.Parenthesis.Open, - Tokens.Puncuation.Parenthesis.Close, - Tokens.Keywords.Where, - Tokens.Type("X"), - Tokens.Puncuation.Colon, - Tokens.Keywords.Struct, - 
Tokens.Puncuation.CurlyBrace.Open, - Tokens.Puncuation.CurlyBrace.Close]); + Token.Keywords.Class, + Token.Identifiers.ClassName("PublicClass"), + Token.Keywords.Where, + Token.Type("T"), + Token.Puncuation.Colon, + Token.Type("ISomething"), + Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.CurlyBrace.Close, + + Token.Keywords.Class, + Token.Identifiers.ClassName("PublicClass"), + Token.Puncuation.Colon, + Token.Type("Dictionary"), + Token.Puncuation.TypeParameters.Begin, + Token.Type("T"), + Token.Puncuation.Comma, + Token.Type("List"), + Token.Puncuation.TypeParameters.Begin, + Token.Type("string"), + Token.Puncuation.TypeParameters.End, + Token.Puncuation.SquareBracket.Open, + Token.Puncuation.SquareBracket.Close, + Token.Puncuation.TypeParameters.End, + Token.Puncuation.Comma, + Token.Type("ISomething"), + Token.Keywords.Where, + Token.Type("T"), + Token.Puncuation.Colon, + Token.Type("ICar"), + Token.Puncuation.Comma, + Token.Keywords.New, + Token.Puncuation.Parenthesis.Open, + Token.Puncuation.Parenthesis.Close, + Token.Keywords.Where, + Token.Type("X"), + Token.Puncuation.Colon, + Token.Keywords.Struct, + Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.CurlyBrace.Close]); }); it("nested class", () => { @@ -236,17 +236,17 @@ class Klass const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("Klass"), - Tokens.Puncuation.CurlyBrace.Open, + Token.Keywords.Class, + Token.Identifiers.ClassName("Klass"), + Token.Puncuation.CurlyBrace.Open, - Tokens.Keywords.Modifiers.Public, - Tokens.Keywords.Class, - Tokens.Identifiers.ClassName("Nested"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Puncuation.CurlyBrace.Close, + Token.Keywords.Modifiers.Public, + Token.Keywords.Class, + Token.Identifiers.ClassName("Nested"), + Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.CurlyBrace.Close, - Tokens.Puncuation.CurlyBrace.Close]); + Token.Puncuation.CurlyBrace.Close]); }); }); }); \ No newline at end of 
file diff --git a/test/syntaxes/comments.test.syntax.ts b/test/syntaxes/comments.test.syntax.ts index 1620179eeb..3d5dd2d9f4 100644 --- a/test/syntaxes/comments.test.syntax.ts +++ b/test/syntaxes/comments.test.syntax.ts @@ -4,7 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { should } from 'chai'; -import { tokenize, Tokens } from './utils/tokenizer'; +import { tokenize, Token } from './utils/tokenize'; describe("Grammar", () => { before(() => should()); @@ -16,8 +16,8 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Comment.SingleLine.Start, - Tokens.Comment.SingleLine.Text(" foo")]); + Token.Comment.SingleLine.Start, + Token.Comment.SingleLine.Text(" foo")]); }); it("single-line comment after whitespace", () => { @@ -26,9 +26,9 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Comment.LeadingWhitespace(" "), - Tokens.Comment.SingleLine.Start, - Tokens.Comment.SingleLine.Text(" foo")]); + Token.Comment.LeadingWhitespace(" "), + Token.Comment.SingleLine.Start, + Token.Comment.SingleLine.Text(" foo")]); }); it("multi-line comment", () => { @@ -37,9 +37,9 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Comment.MultiLine.Start, - Tokens.Comment.MultiLine.Text(" foo "), - Tokens.Comment.MultiLine.End]); + Token.Comment.MultiLine.Start, + Token.Comment.MultiLine.Text(" foo "), + Token.Comment.MultiLine.End]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/delegates.test.syntax.ts b/test/syntaxes/delegates.test.syntax.ts index 7654d37d9c..167da76d20 100644 --- a/test/syntaxes/delegates.test.syntax.ts +++ b/test/syntaxes/delegates.test.syntax.ts @@ -4,7 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { should } from 'chai'; -import { tokenize, Tokens } from './utils/tokenizer'; 
+import { tokenize, Token } from './utils/tokenize'; describe("Grammar", () => { before(() => should()); @@ -16,12 +16,12 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Delegate, - Tokens.Type("void"), - Tokens.Identifiers.DelegateName("D"), - Tokens.Puncuation.Parenthesis.Open, - Tokens.Puncuation.Parenthesis.Close, - Tokens.Puncuation.Semicolon]); + Token.Keywords.Delegate, + Token.Type("void"), + Token.Identifiers.DelegateName("D"), + Token.Puncuation.Parenthesis.Open, + Token.Puncuation.Parenthesis.Close, + Token.Puncuation.Semicolon]); }); it("generic delegate with variance", () => { @@ -30,14 +30,14 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Delegate, - Tokens.Type("TResult"), - Tokens.Identifiers.DelegateName("D"), - Tokens.Puncuation.Parenthesis.Open, - Tokens.Type("T"), - Tokens.Variables.Parameter("arg1"), - Tokens.Puncuation.Parenthesis.Close, - Tokens.Puncuation.Semicolon]); + Token.Keywords.Delegate, + Token.Type("TResult"), + Token.Identifiers.DelegateName("D"), + Token.Puncuation.Parenthesis.Open, + Token.Type("T"), + Token.Variables.Parameter("arg1"), + Token.Puncuation.Parenthesis.Close, + Token.Puncuation.Semicolon]); }); it("generic delegate with constraints", () => { @@ -50,16 +50,16 @@ delegate void D() const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Delegate, - Tokens.Type("void"), - Tokens.Identifiers.DelegateName("D"), - Tokens.Puncuation.Parenthesis.Open, - Tokens.Puncuation.Parenthesis.Close, - Tokens.Keywords.Where, - Tokens.Type("T1"), - Tokens.Puncuation.Colon, - Tokens.Type("T2"), - Tokens.Puncuation.Semicolon]); + Token.Keywords.Delegate, + Token.Type("void"), + Token.Identifiers.DelegateName("D"), + Token.Puncuation.Parenthesis.Open, + Token.Puncuation.Parenthesis.Close, + Token.Keywords.Where, + Token.Type("T1"), + Token.Puncuation.Colon, + Token.Type("T2"), + 
Token.Puncuation.Semicolon]); }); it("delegate with multiple parameters", () => { @@ -68,25 +68,25 @@ delegate void D() const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Delegate, - Tokens.Type("int"), - Tokens.Identifiers.DelegateName("D"), - Tokens.Puncuation.Parenthesis.Open, - Tokens.Keywords.Modifiers.Ref, - Tokens.Type("string"), - Tokens.Variables.Parameter("x"), - Tokens.Puncuation.Comma, - Tokens.Keywords.Modifiers.Out, - Tokens.Type("int"), - Tokens.Variables.Parameter("y"), - Tokens.Puncuation.Comma, - Tokens.Keywords.Modifiers.Params, - Tokens.Type("object"), - Tokens.Puncuation.SquareBracket.Open, - Tokens.Puncuation.SquareBracket.Close, - Tokens.Variables.Parameter("z"), - Tokens.Puncuation.Parenthesis.Close, - Tokens.Puncuation.Semicolon]); + Token.Keywords.Delegate, + Token.Type("int"), + Token.Identifiers.DelegateName("D"), + Token.Puncuation.Parenthesis.Open, + Token.Keywords.Modifiers.Ref, + Token.Type("string"), + Token.Variables.Parameter("x"), + Token.Puncuation.Comma, + Token.Keywords.Modifiers.Out, + Token.Type("int"), + Token.Variables.Parameter("y"), + Token.Puncuation.Comma, + Token.Keywords.Modifiers.Params, + Token.Type("object"), + Token.Puncuation.SquareBracket.Open, + Token.Puncuation.SquareBracket.Close, + Token.Variables.Parameter("z"), + Token.Puncuation.Parenthesis.Close, + Token.Puncuation.Semicolon]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/enums.test.syntax.ts b/test/syntaxes/enums.test.syntax.ts index 496067d7bd..15dc8c7278 100644 --- a/test/syntaxes/enums.test.syntax.ts +++ b/test/syntaxes/enums.test.syntax.ts @@ -4,7 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { should } from 'chai'; -import { tokenize, Tokens } from './utils/tokenizer'; +import { tokenize, Token } from './utils/tokenize'; describe("Grammar", () => { before(() => should()); @@ -16,10 +16,10 @@ describe("Grammar", () => { const tokens = 
tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Enum, - Tokens.Identifiers.EnumName("E"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Puncuation.CurlyBrace.Close]); + Token.Keywords.Enum, + Token.Identifiers.EnumName("E"), + Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.CurlyBrace.Close]); }); it("enum with base type", () => { @@ -28,12 +28,12 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Enum, - Tokens.Identifiers.EnumName("E"), - Tokens.Puncuation.Colon, - Tokens.Type("byte"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Puncuation.CurlyBrace.Close]); + Token.Keywords.Enum, + Token.Identifiers.EnumName("E"), + Token.Puncuation.Colon, + Token.Type("byte"), + Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.CurlyBrace.Close]); }); it("enum with single member", () => { @@ -42,11 +42,11 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Enum, - Tokens.Identifiers.EnumName("E"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Variables.EnumMember("M1"), - Tokens.Puncuation.CurlyBrace.Close]); + Token.Keywords.Enum, + Token.Identifiers.EnumName("E"), + Token.Puncuation.CurlyBrace.Open, + Token.Variables.EnumMember("M1"), + Token.Puncuation.CurlyBrace.Close]); }); it("enum with multiple members", () => { @@ -55,15 +55,15 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Enum, - Tokens.Identifiers.EnumName("Color"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Variables.EnumMember("Red"), - Tokens.Puncuation.Comma, - Tokens.Variables.EnumMember("Green"), - Tokens.Puncuation.Comma, - Tokens.Variables.EnumMember("Blue"), - Tokens.Puncuation.CurlyBrace.Close]); + Token.Keywords.Enum, + Token.Identifiers.EnumName("Color"), + Token.Puncuation.CurlyBrace.Open, + Token.Variables.EnumMember("Red"), + Token.Puncuation.Comma, + Token.Variables.EnumMember("Green"), + 
Token.Puncuation.Comma, + Token.Variables.EnumMember("Blue"), + Token.Puncuation.CurlyBrace.Close]); }); it("enum with initialized member", () => { @@ -80,17 +80,17 @@ enum E const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Enum, - Tokens.Identifiers.EnumName("E"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Variables.EnumMember("Value1"), - Tokens.Operators.Assignment, - Tokens.Literals.Numeric.Decimal("1"), - Tokens.Puncuation.Comma, - Tokens.Variables.EnumMember("Value2"), - Tokens.Puncuation.Comma, - Tokens.Variables.EnumMember("Value3"), - Tokens.Puncuation.CurlyBrace.Close]); + Token.Keywords.Enum, + Token.Identifiers.EnumName("E"), + Token.Puncuation.CurlyBrace.Open, + Token.Variables.EnumMember("Value1"), + Token.Operators.Assignment, + Token.Literals.Numeric.Decimal("1"), + Token.Puncuation.Comma, + Token.Variables.EnumMember("Value2"), + Token.Puncuation.Comma, + Token.Variables.EnumMember("Value3"), + Token.Puncuation.CurlyBrace.Close]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/events.test.syntax.ts b/test/syntaxes/events.test.syntax.ts index 7d757472ef..2e3989be23 100644 --- a/test/syntaxes/events.test.syntax.ts +++ b/test/syntaxes/events.test.syntax.ts @@ -4,7 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { should } from 'chai'; -import { tokenize, Input, Tokens } from './utils/tokenizer'; +import { tokenize, Input, Token } from './utils/tokenize'; describe("Grammar", () => { before(() => should()); @@ -16,11 +16,11 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public, - Tokens.Keywords.Event, - Tokens.Type("Type"), - Tokens.Identifiers.EventName("Event"), - Tokens.Puncuation.Semicolon]); + Token.Keywords.Modifiers.Public, + Token.Keywords.Event, + Token.Type("Type"), + Token.Identifiers.EventName("Event"), + Token.Puncuation.Semicolon]); }); it("declaration with 
multiple modifiers", () => { @@ -29,12 +29,12 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Protected, - Tokens.Keywords.Modifiers.Internal, - Tokens.Keywords.Event, - Tokens.Type("Type"), - Tokens.Identifiers.EventName("Event"), - Tokens.Puncuation.Semicolon]); + Token.Keywords.Modifiers.Protected, + Token.Keywords.Modifiers.Internal, + Token.Keywords.Event, + Token.Type("Type"), + Token.Identifiers.EventName("Event"), + Token.Puncuation.Semicolon]); }); it("declaration with multiple declarators", () => { @@ -43,13 +43,13 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public, - Tokens.Keywords.Event, - Tokens.Type("Type"), - Tokens.Identifiers.EventName("Event1"), - Tokens.Puncuation.Comma, - Tokens.Identifiers.EventName("Event2"), - Tokens.Puncuation.Semicolon]); + Token.Keywords.Modifiers.Public, + Token.Keywords.Event, + Token.Type("Type"), + Token.Identifiers.EventName("Event1"), + Token.Puncuation.Comma, + Token.Identifiers.EventName("Event2"), + Token.Puncuation.Semicolon]); }); it("generic", () => { @@ -58,24 +58,24 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public, - Tokens.Keywords.Event, - Tokens.Type("EventHandler"), - Tokens.Puncuation.TypeParameters.Begin, - Tokens.Type("List"), - Tokens.Puncuation.TypeParameters.Begin, - Tokens.Type("T"), - Tokens.Puncuation.TypeParameters.End, - Tokens.Puncuation.Comma, - Tokens.Type("Dictionary"), - Tokens.Puncuation.TypeParameters.Begin, - Tokens.Type("T"), - Tokens.Puncuation.Comma, - Tokens.Type("D"), - Tokens.Puncuation.TypeParameters.End, - Tokens.Puncuation.TypeParameters.End, - Tokens.Identifiers.EventName("Event"), - Tokens.Puncuation.Semicolon]); + Token.Keywords.Modifiers.Public, + Token.Keywords.Event, + Token.Type("EventHandler"), + Token.Puncuation.TypeParameters.Begin, + 
Token.Type("List"), + Token.Puncuation.TypeParameters.Begin, + Token.Type("T"), + Token.Puncuation.TypeParameters.End, + Token.Puncuation.Comma, + Token.Type("Dictionary"), + Token.Puncuation.TypeParameters.Begin, + Token.Type("T"), + Token.Puncuation.Comma, + Token.Type("D"), + Token.Puncuation.TypeParameters.End, + Token.Puncuation.TypeParameters.End, + Token.Identifiers.EventName("Event"), + Token.Puncuation.Semicolon]); }); it("declaration with accessors", () => { @@ -90,18 +90,18 @@ public event Type Event const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public, - Tokens.Keywords.Event, - Tokens.Type("Type"), - Tokens.Identifiers.EventName("Event"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Keywords.Add, - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Puncuation.CurlyBrace.Close, - Tokens.Keywords.Remove, - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Puncuation.CurlyBrace.Close, - Tokens.Puncuation.CurlyBrace.Close]); + Token.Keywords.Modifiers.Public, + Token.Keywords.Event, + Token.Type("Type"), + Token.Identifiers.EventName("Event"), + Token.Puncuation.CurlyBrace.Open, + Token.Keywords.Add, + Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.CurlyBrace.Close, + Token.Keywords.Remove, + Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.CurlyBrace.Close, + Token.Puncuation.CurlyBrace.Close]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/extern-aliases.test.syntax.ts b/test/syntaxes/extern-aliases.test.syntax.ts index da4770b1ee..952ba2653b 100644 --- a/test/syntaxes/extern-aliases.test.syntax.ts +++ b/test/syntaxes/extern-aliases.test.syntax.ts @@ -4,7 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { should } from 'chai'; -import { tokenize, Tokens } from './utils/tokenizer'; +import { tokenize, Token } from './utils/tokenize'; describe("Grammar", () => { before(() => should()); @@ -19,14 +19,14 @@ extern alias Y;`; const tokens 
= tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Extern, - Tokens.Keywords.Alias, - Tokens.Variables.Alias("X"), - Tokens.Puncuation.Semicolon, - Tokens.Keywords.Extern, - Tokens.Keywords.Alias, - Tokens.Variables.Alias("Y"), - Tokens.Puncuation.Semicolon]); + Token.Keywords.Extern, + Token.Keywords.Alias, + Token.Variables.Alias("X"), + Token.Puncuation.Semicolon, + Token.Keywords.Extern, + Token.Keywords.Alias, + Token.Variables.Alias("Y"), + Token.Puncuation.Semicolon]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/fields.test.syntax.ts b/test/syntaxes/fields.test.syntax.ts index c6d5a2e897..3aefb1d7d4 100644 --- a/test/syntaxes/fields.test.syntax.ts +++ b/test/syntaxes/fields.test.syntax.ts @@ -4,7 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { should } from 'chai'; -import { tokenize, Input, Tokens } from './utils/tokenizer'; +import { tokenize, Input, Token } from './utils/tokenize'; describe("Grammar", () => { before(() => should()); @@ -20,20 +20,20 @@ private List field123;`); const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Private, - Tokens.Type("List"), - Tokens.Identifiers.FieldName("_field"), - Tokens.Puncuation.Semicolon, - - Tokens.Keywords.Modifiers.Private, - Tokens.Type("List"), - Tokens.Identifiers.FieldName("field"), - Tokens.Puncuation.Semicolon, - - Tokens.Keywords.Modifiers.Private, - Tokens.Type("List"), - Tokens.Identifiers.FieldName("field123"), - Tokens.Puncuation.Semicolon]); + Token.Keywords.Modifiers.Private, + Token.Type("List"), + Token.Identifiers.FieldName("_field"), + Token.Puncuation.Semicolon, + + Token.Keywords.Modifiers.Private, + Token.Type("List"), + Token.Identifiers.FieldName("field"), + Token.Puncuation.Semicolon, + + Token.Keywords.Modifiers.Private, + Token.Type("List"), + Token.Identifiers.FieldName("field123"), + Token.Puncuation.Semicolon]); }); it("generic", () => { @@ -42,23 
+42,23 @@ private List field123;`); const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Private, - Tokens.Type("Dictionary"), - Tokens.Puncuation.TypeParameters.Begin, - Tokens.Type("List"), - Tokens.Puncuation.TypeParameters.Begin, - Tokens.Type("T"), - Tokens.Puncuation.TypeParameters.End, - Tokens.Puncuation.Comma, - Tokens.Type("Dictionary"), - Tokens.Puncuation.TypeParameters.Begin, - Tokens.Type("T"), - Tokens.Puncuation.Comma, - Tokens.Type("D"), - Tokens.Puncuation.TypeParameters.End, - Tokens.Puncuation.TypeParameters.End, - Tokens.Identifiers.FieldName("_field"), - Tokens.Puncuation.Semicolon]); + Token.Keywords.Modifiers.Private, + Token.Type("Dictionary"), + Token.Puncuation.TypeParameters.Begin, + Token.Type("List"), + Token.Puncuation.TypeParameters.Begin, + Token.Type("T"), + Token.Puncuation.TypeParameters.End, + Token.Puncuation.Comma, + Token.Type("Dictionary"), + Token.Puncuation.TypeParameters.Begin, + Token.Type("T"), + Token.Puncuation.Comma, + Token.Type("D"), + Token.Puncuation.TypeParameters.End, + Token.Puncuation.TypeParameters.End, + Token.Identifiers.FieldName("_field"), + Token.Puncuation.Semicolon]); }); @@ -72,21 +72,21 @@ string _field3;`); const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Private, - Tokens.Keywords.Modifiers.Static, - Tokens.Keywords.Modifiers.ReadOnly, - Tokens.Type("List"), - Tokens.Identifiers.FieldName("_field"), - Tokens.Puncuation.Semicolon, - - Tokens.Keywords.Modifiers.ReadOnly, - Tokens.Type("string"), - Tokens.Identifiers.FieldName("_field2"), - Tokens.Puncuation.Semicolon, - - Tokens.Type("string"), - Tokens.Identifiers.FieldName("_field3"), - Tokens.Puncuation.Semicolon]); + Token.Keywords.Modifiers.Private, + Token.Keywords.Modifiers.Static, + Token.Keywords.Modifiers.ReadOnly, + Token.Type("List"), + Token.Identifiers.FieldName("_field"), + Token.Puncuation.Semicolon, + + Token.Keywords.Modifiers.ReadOnly, + Token.Type("string"), 
+ Token.Identifiers.FieldName("_field2"), + Token.Puncuation.Semicolon, + + Token.Type("string"), + Token.Identifiers.FieldName("_field3"), + Token.Puncuation.Semicolon]); }); it("types", () => { @@ -98,15 +98,15 @@ string[] field123;`); const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Type("string"), - Tokens.Identifiers.FieldName("field123"), - Tokens.Puncuation.Semicolon, - - Tokens.Type("string"), - Tokens.Puncuation.SquareBracket.Open, - Tokens.Puncuation.SquareBracket.Close, - Tokens.Identifiers.FieldName("field123"), - Tokens.Puncuation.Semicolon]); + Token.Type("string"), + Token.Identifiers.FieldName("field123"), + Token.Puncuation.Semicolon, + + Token.Type("string"), + Token.Puncuation.SquareBracket.Open, + Token.Puncuation.SquareBracket.Close, + Token.Identifiers.FieldName("field123"), + Token.Puncuation.Semicolon]); }); it("assignment", () => { @@ -118,21 +118,21 @@ const bool field = true;`); let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Private, - Tokens.Type("string"), - Tokens.Identifiers.FieldName("field"), - Tokens.Operators.Assignment, - Tokens.Puncuation.String.Begin, - Tokens.Literals.String("hello"), - Tokens.Puncuation.String.End, - Tokens.Puncuation.Semicolon, - - Tokens.Keywords.Modifiers.Const, - Tokens.Type("bool"), - Tokens.Identifiers.FieldName("field"), - Tokens.Operators.Assignment, - Tokens.Literals.Boolean.True, - Tokens.Puncuation.Semicolon]); + Token.Keywords.Modifiers.Private, + Token.Type("string"), + Token.Identifiers.FieldName("field"), + Token.Operators.Assignment, + Token.Puncuation.String.Begin, + Token.Literals.String("hello"), + Token.Puncuation.String.End, + Token.Puncuation.Semicolon, + + Token.Keywords.Modifiers.Const, + Token.Type("bool"), + Token.Identifiers.FieldName("field"), + Token.Operators.Assignment, + Token.Literals.Boolean.True, + Token.Puncuation.Semicolon]); }); it("declaration with multiple declarators", () => { @@ -141,19 +141,19 @@ const bool 
field = true;`); const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Type("int"), - Tokens.Identifiers.FieldName("x"), - Tokens.Operators.Assignment, - Tokens.Literals.Numeric.Decimal("19"), - Tokens.Puncuation.Comma, - Tokens.Identifiers.FieldName("y"), - Tokens.Operators.Assignment, - Tokens.Literals.Numeric.Decimal("23"), - Tokens.Puncuation.Comma, - Tokens.Identifiers.FieldName("z"), - Tokens.Operators.Assignment, - Tokens.Literals.Numeric.Decimal("42"), - Tokens.Puncuation.Semicolon]); + Token.Type("int"), + Token.Identifiers.FieldName("x"), + Token.Operators.Assignment, + Token.Literals.Numeric.Decimal("19"), + Token.Puncuation.Comma, + Token.Identifiers.FieldName("y"), + Token.Operators.Assignment, + Token.Literals.Numeric.Decimal("23"), + Token.Puncuation.Comma, + Token.Identifiers.FieldName("z"), + Token.Operators.Assignment, + Token.Literals.Numeric.Decimal("42"), + Token.Puncuation.Semicolon]); }); }); }); diff --git a/test/syntaxes/indexers.test.syntax.ts b/test/syntaxes/indexers.test.syntax.ts index 6806157aef..ff02093f8e 100644 --- a/test/syntaxes/indexers.test.syntax.ts +++ b/test/syntaxes/indexers.test.syntax.ts @@ -4,7 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { should } from 'chai'; -import { tokenize, Input, Tokens } from './utils/tokenizer'; +import { tokenize, Input, Token } from './utils/tokenize'; describe("Grammar", () => { before(() => should()); @@ -21,22 +21,22 @@ public string this[int index] const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public, - Tokens.Type("string"), - Tokens.Keywords.This, - Tokens.Puncuation.SquareBracket.Open, - Tokens.Type("int"), - Tokens.Variables.Parameter("index"), - Tokens.Puncuation.SquareBracket.Close, - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Keywords.Get, - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Keywords.Return, - Tokens.Variables.ReadWrite("index"), - 
Tokens.Variables.ReadWrite("ToString"), - Tokens.Puncuation.Semicolon, - Tokens.Puncuation.CurlyBrace.Close, - Tokens.Puncuation.CurlyBrace.Close]); + Token.Keywords.Modifiers.Public, + Token.Type("string"), + Token.Keywords.This, + Token.Puncuation.SquareBracket.Open, + Token.Type("int"), + Token.Variables.Parameter("index"), + Token.Puncuation.SquareBracket.Close, + Token.Puncuation.CurlyBrace.Open, + Token.Keywords.Get, + Token.Puncuation.CurlyBrace.Open, + Token.Keywords.Return, + Token.Variables.ReadWrite("index"), + Token.Variables.ReadWrite("ToString"), + Token.Puncuation.Semicolon, + Token.Puncuation.CurlyBrace.Close, + Token.Puncuation.CurlyBrace.Close]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/interation-statements.test.syntax.ts b/test/syntaxes/interation-statements.test.syntax.ts index 823d7ff2fd..448d0fd876 100644 --- a/test/syntaxes/interation-statements.test.syntax.ts +++ b/test/syntaxes/interation-statements.test.syntax.ts @@ -4,7 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { should } from 'chai'; -import { tokenize, Input, Tokens } from './utils/tokenizer'; +import { tokenize, Input, Token } from './utils/tokenize'; describe("Grammar", () => { before(() => should()); diff --git a/test/syntaxes/interfaces.test.syntax.ts b/test/syntaxes/interfaces.test.syntax.ts index a0152bf643..7a9aca3ed8 100644 --- a/test/syntaxes/interfaces.test.syntax.ts +++ b/test/syntaxes/interfaces.test.syntax.ts @@ -4,7 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { should } from 'chai'; -import { tokenize, Tokens } from './utils/tokenizer'; +import { tokenize, Token } from './utils/tokenize'; describe("Grammar", () => { before(() => should()); @@ -16,10 +16,10 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Interface, - 
Tokens.Identifiers.InterfaceName("IFoo"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Puncuation.CurlyBrace.Close]); + Token.Keywords.Interface, + Token.Identifiers.InterfaceName("IFoo"), + Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.CurlyBrace.Close]); }); it("interface inheritance", () => { @@ -32,16 +32,16 @@ interface IBar : IFoo { } const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Interface, - Tokens.Identifiers.InterfaceName("IFoo"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Puncuation.CurlyBrace.Close, - Tokens.Keywords.Interface, - Tokens.Identifiers.InterfaceName("IBar"), - Tokens.Puncuation.Colon, - Tokens.Type("IFoo"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Puncuation.CurlyBrace.Close]); + Token.Keywords.Interface, + Token.Identifiers.InterfaceName("IFoo"), + Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.CurlyBrace.Close, + Token.Keywords.Interface, + Token.Identifiers.InterfaceName("IBar"), + Token.Puncuation.Colon, + Token.Type("IFoo"), + Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.CurlyBrace.Close]); }); it("generic interface", () => { @@ -50,10 +50,10 @@ interface IBar : IFoo { } const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Interface, - Tokens.Identifiers.InterfaceName("IFoo"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Puncuation.CurlyBrace.Close]); + Token.Keywords.Interface, + Token.Identifiers.InterfaceName("IFoo"), + Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.CurlyBrace.Close]); }); it("generic interface with variance", () => { @@ -62,10 +62,10 @@ interface IBar : IFoo { } const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Interface, - Tokens.Identifiers.InterfaceName("IFoo"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Puncuation.CurlyBrace.Close]); + Token.Keywords.Interface, + Token.Identifiers.InterfaceName("IFoo"), + Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.CurlyBrace.Close]); }); it("generic 
interface with constraints", () => { @@ -74,14 +74,14 @@ interface IBar : IFoo { } const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Interface, - Tokens.Identifiers.InterfaceName("IFoo"), - Tokens.Keywords.Where, - Tokens.Type("T1"), - Tokens.Puncuation.Colon, - Tokens.Type("T2"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Puncuation.CurlyBrace.Close]); + Token.Keywords.Interface, + Token.Identifiers.InterfaceName("IFoo"), + Token.Keywords.Where, + Token.Type("T1"), + Token.Puncuation.Colon, + Token.Type("T2"), + Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.CurlyBrace.Close]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/interpolated-strings.test.syntax.ts b/test/syntaxes/interpolated-strings.test.syntax.ts index da401d6129..3f1aa17dfb 100644 --- a/test/syntaxes/interpolated-strings.test.syntax.ts +++ b/test/syntaxes/interpolated-strings.test.syntax.ts @@ -4,7 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { should } from 'chai'; -import { tokenize, Input, Tokens } from './utils/tokenizer'; +import { tokenize, Input, Token } from './utils/tokenize'; describe("Grammar", () => { before(() => should()); @@ -16,21 +16,21 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Type("string"), - Tokens.Identifiers.FieldName("test"), - Tokens.Operators.Assignment, - Tokens.Puncuation.InterpolatedString.Begin, - Tokens.Literals.String("hello "), - Tokens.Puncuation.Interpolation.Begin, - Tokens.Variables.ReadWrite("one"), - Tokens.Puncuation.Interpolation.End, - Tokens.Literals.String(" world "), - Tokens.Puncuation.Interpolation.Begin, - Tokens.Variables.ReadWrite("two"), - Tokens.Puncuation.Interpolation.End, - Tokens.Literals.String("!"), - Tokens.Puncuation.InterpolatedString.End, - Tokens.Puncuation.Semicolon]); + Token.Type("string"), + Token.Identifiers.FieldName("test"), + Token.Operators.Assignment, 
+ Token.Puncuation.InterpolatedString.Begin, + Token.Literals.String("hello "), + Token.Puncuation.Interpolation.Begin, + Token.Variables.ReadWrite("one"), + Token.Puncuation.Interpolation.End, + Token.Literals.String(" world "), + Token.Puncuation.Interpolation.Begin, + Token.Variables.ReadWrite("two"), + Token.Puncuation.Interpolation.End, + Token.Literals.String("!"), + Token.Puncuation.InterpolatedString.End, + Token.Puncuation.Semicolon]); }); it("no interpolations", () => { @@ -39,13 +39,13 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Type("string"), - Tokens.Identifiers.FieldName("test"), - Tokens.Operators.Assignment, - Tokens.Puncuation.InterpolatedString.Begin, - Tokens.Literals.String("hello world!"), - Tokens.Puncuation.InterpolatedString.End, - Tokens.Puncuation.Semicolon]); + Token.Type("string"), + Token.Identifiers.FieldName("test"), + Token.Operators.Assignment, + Token.Puncuation.InterpolatedString.Begin, + Token.Literals.String("hello world!"), + Token.Puncuation.InterpolatedString.End, + Token.Puncuation.Semicolon]); }); it("no interpolations due to escaped braces", () => { @@ -54,13 +54,13 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Type("string"), - Tokens.Identifiers.FieldName("test"), - Tokens.Operators.Assignment, - Tokens.Puncuation.InterpolatedString.Begin, - Tokens.Literals.String("hello {{one}} world {{two}}!"), - Tokens.Puncuation.InterpolatedString.End, - Tokens.Puncuation.Semicolon]); + Token.Type("string"), + Token.Identifiers.FieldName("test"), + Token.Operators.Assignment, + Token.Puncuation.InterpolatedString.Begin, + Token.Literals.String("hello {{one}} world {{two}}!"), + Token.Puncuation.InterpolatedString.End, + Token.Puncuation.Semicolon]); }); it("two interpolations with escaped braces", () => { @@ -69,23 +69,23 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - 
Tokens.Type("string"), - Tokens.Identifiers.FieldName("test"), - Tokens.Operators.Assignment, - Tokens.Puncuation.InterpolatedString.Begin, - Tokens.Literals.String("hello "), - Tokens.Literals.String("{{"), - Tokens.Puncuation.Interpolation.Begin, - Tokens.Variables.ReadWrite("one"), - Tokens.Puncuation.Interpolation.End, - Tokens.Literals.String("}} world "), - Tokens.Literals.String("{{"), - Tokens.Puncuation.Interpolation.Begin, - Tokens.Variables.ReadWrite("two"), - Tokens.Puncuation.Interpolation.End, - Tokens.Literals.String("}}!"), - Tokens.Puncuation.InterpolatedString.End, - Tokens.Puncuation.Semicolon]); + Token.Type("string"), + Token.Identifiers.FieldName("test"), + Token.Operators.Assignment, + Token.Puncuation.InterpolatedString.Begin, + Token.Literals.String("hello "), + Token.Literals.String("{{"), + Token.Puncuation.Interpolation.Begin, + Token.Variables.ReadWrite("one"), + Token.Puncuation.Interpolation.End, + Token.Literals.String("}} world "), + Token.Literals.String("{{"), + Token.Puncuation.Interpolation.Begin, + Token.Variables.ReadWrite("two"), + Token.Puncuation.Interpolation.End, + Token.Literals.String("}}!"), + Token.Puncuation.InterpolatedString.End, + Token.Puncuation.Semicolon]); }); it("no interpolations due to double-escaped braces", () => { @@ -94,13 +94,13 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Type("string"), - Tokens.Identifiers.FieldName("test"), - Tokens.Operators.Assignment, - Tokens.Puncuation.InterpolatedString.Begin, - Tokens.Literals.String("hello {{{{one}}}} world {{{{two}}}}!"), - Tokens.Puncuation.InterpolatedString.End, - Tokens.Puncuation.Semicolon]); + Token.Type("string"), + Token.Identifiers.FieldName("test"), + Token.Operators.Assignment, + Token.Puncuation.InterpolatedString.Begin, + Token.Literals.String("hello {{{{one}}}} world {{{{two}}}}!"), + Token.Puncuation.InterpolatedString.End, + Token.Puncuation.Semicolon]); }); it("break across two lines 
(non-verbatim)", () => { @@ -111,17 +111,17 @@ world!";`); const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Type("string"), - Tokens.Identifiers.FieldName("test"), - Tokens.Operators.Assignment, - Tokens.Puncuation.InterpolatedString.Begin, - Tokens.Literals.String("hell"), + Token.Type("string"), + Token.Identifiers.FieldName("test"), + Token.Operators.Assignment, + Token.Puncuation.InterpolatedString.Begin, + Token.Literals.String("hell"), // Note: Because the string ended prematurely, the rest of this line and the contents of the next are junk. - Tokens.IllegalNewLine("o"), - Tokens.Variables.ReadWrite("world"), - Tokens.Puncuation.String.Begin, - Tokens.IllegalNewLine(";")]); + Token.IllegalNewLine("o"), + Token.Variables.ReadWrite("world"), + Token.Puncuation.String.Begin, + Token.IllegalNewLine(";")]); }); it("verbatim with two interpolations", () => { @@ -130,21 +130,21 @@ world!";`); const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Type("string"), - Tokens.Identifiers.FieldName("test"), - Tokens.Operators.Assignment, - Tokens.Puncuation.InterpolatedString.VerbatimBegin, - Tokens.Literals.String("hello "), - Tokens.Puncuation.Interpolation.Begin, - Tokens.Variables.ReadWrite("one"), - Tokens.Puncuation.Interpolation.End, - Tokens.Literals.String(" world "), - Tokens.Puncuation.Interpolation.Begin, - Tokens.Variables.ReadWrite("two"), - Tokens.Puncuation.Interpolation.End, - Tokens.Literals.String("!"), - Tokens.Puncuation.InterpolatedString.End, - Tokens.Puncuation.Semicolon]); + Token.Type("string"), + Token.Identifiers.FieldName("test"), + Token.Operators.Assignment, + Token.Puncuation.InterpolatedString.VerbatimBegin, + Token.Literals.String("hello "), + Token.Puncuation.Interpolation.Begin, + Token.Variables.ReadWrite("one"), + Token.Puncuation.Interpolation.End, + Token.Literals.String(" world "), + Token.Puncuation.Interpolation.Begin, + Token.Variables.ReadWrite("two"), + Token.Puncuation.Interpolation.End, + 
Token.Literals.String("!"), + Token.Puncuation.InterpolatedString.End, + Token.Puncuation.Semicolon]); }); it("break across two lines with two interpolations (verbatim)", () => { @@ -155,21 +155,21 @@ world {two}!";`); const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Type("string"), - Tokens.Identifiers.FieldName("test"), - Tokens.Operators.Assignment, - Tokens.Puncuation.InterpolatedString.VerbatimBegin, - Tokens.Literals.String("hello "), - Tokens.Puncuation.Interpolation.Begin, - Tokens.Variables.ReadWrite("one"), - Tokens.Puncuation.Interpolation.End, - Tokens.Literals.String("world "), - Tokens.Puncuation.Interpolation.Begin, - Tokens.Variables.ReadWrite("two"), - Tokens.Puncuation.Interpolation.End, - Tokens.Literals.String("!"), - Tokens.Puncuation.InterpolatedString.End, - Tokens.Puncuation.Semicolon]); + Token.Type("string"), + Token.Identifiers.FieldName("test"), + Token.Operators.Assignment, + Token.Puncuation.InterpolatedString.VerbatimBegin, + Token.Literals.String("hello "), + Token.Puncuation.Interpolation.Begin, + Token.Variables.ReadWrite("one"), + Token.Puncuation.Interpolation.End, + Token.Literals.String("world "), + Token.Puncuation.Interpolation.Begin, + Token.Variables.ReadWrite("two"), + Token.Puncuation.Interpolation.End, + Token.Literals.String("!"), + Token.Puncuation.InterpolatedString.End, + Token.Puncuation.Semicolon]); }); it("break across two lines with no interpolations (verbatim)", () => { @@ -180,14 +180,14 @@ world!";`); const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Type("string"), - Tokens.Identifiers.FieldName("test"), - Tokens.Operators.Assignment, - Tokens.Puncuation.InterpolatedString.VerbatimBegin, - Tokens.Literals.String("hello"), - Tokens.Literals.String("world!"), - Tokens.Puncuation.InterpolatedString.End, - Tokens.Puncuation.Semicolon]); + Token.Type("string"), + Token.Identifiers.FieldName("test"), + Token.Operators.Assignment, + 
Token.Puncuation.InterpolatedString.VerbatimBegin, + Token.Literals.String("hello"), + Token.Literals.String("world!"), + Token.Puncuation.InterpolatedString.End, + Token.Puncuation.Semicolon]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/methods.test.syntax.ts b/test/syntaxes/methods.test.syntax.ts index 3a4808fba6..c3ad030e91 100644 --- a/test/syntaxes/methods.test.syntax.ts +++ b/test/syntaxes/methods.test.syntax.ts @@ -4,7 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { should } from 'chai'; -import { tokenize, Input, Tokens } from './utils/tokenizer'; +import { tokenize, Input, Token } from './utils/tokenize'; describe("Grammar", () => { before(() => should()); @@ -16,12 +16,12 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Type("void"), - Tokens.Identifiers.MethodName("Foo"), - Tokens.Puncuation.Parenthesis.Open, - Tokens.Puncuation.Parenthesis.Close, - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Puncuation.CurlyBrace.Close]); + Token.Type("void"), + Token.Identifiers.MethodName("Foo"), + Token.Puncuation.Parenthesis.Open, + Token.Puncuation.Parenthesis.Close, + Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.CurlyBrace.Close]); }); it("declaration with two parameters", () => { @@ -34,22 +34,22 @@ int Add(int x, int y) const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Type("int"), - Tokens.Identifiers.MethodName("Add"), - Tokens.Puncuation.Parenthesis.Open, - Tokens.Type("int"), - Tokens.Variables.Parameter("x"), - Tokens.Puncuation.Comma, - Tokens.Type("int"), - Tokens.Variables.Parameter("y"), - Tokens.Puncuation.Parenthesis.Close, - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Keywords.Return, - Tokens.Variables.ReadWrite("x"), - Tokens.Operators.Arithmetic.Addition, - Tokens.Variables.ReadWrite("y"), - Tokens.Puncuation.Semicolon, - Tokens.Puncuation.CurlyBrace.Close]); + Token.Type("int"), + 
Token.Identifiers.MethodName("Add"), + Token.Puncuation.Parenthesis.Open, + Token.Type("int"), + Token.Variables.Parameter("x"), + Token.Puncuation.Comma, + Token.Type("int"), + Token.Variables.Parameter("y"), + Token.Puncuation.Parenthesis.Close, + Token.Puncuation.CurlyBrace.Open, + Token.Keywords.Return, + Token.Variables.ReadWrite("x"), + Token.Operators.Arithmetic.Addition, + Token.Variables.ReadWrite("y"), + Token.Puncuation.Semicolon, + Token.Puncuation.CurlyBrace.Close]); }); it("expression body", () => { @@ -58,20 +58,20 @@ int Add(int x, int y) const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Type("int"), - Tokens.Identifiers.MethodName("Add"), - Tokens.Puncuation.Parenthesis.Open, - Tokens.Type("int"), - Tokens.Variables.Parameter("x"), - Tokens.Puncuation.Comma, - Tokens.Type("int"), - Tokens.Variables.Parameter("y"), - Tokens.Puncuation.Parenthesis.Close, - Tokens.Operators.Arrow, - Tokens.Variables.ReadWrite("x"), - Tokens.Operators.Arithmetic.Addition, - Tokens.Variables.ReadWrite("y"), - Tokens.Puncuation.Semicolon]); + Token.Type("int"), + Token.Identifiers.MethodName("Add"), + Token.Puncuation.Parenthesis.Open, + Token.Type("int"), + Token.Variables.Parameter("x"), + Token.Puncuation.Comma, + Token.Type("int"), + Token.Variables.Parameter("y"), + Token.Puncuation.Parenthesis.Close, + Token.Operators.Arrow, + Token.Variables.ReadWrite("x"), + Token.Operators.Arithmetic.Addition, + Token.Variables.ReadWrite("y"), + Token.Puncuation.Semicolon]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/namespaces.test.syntax.ts b/test/syntaxes/namespaces.test.syntax.ts index f6b8ceb3a6..376658ab20 100644 --- a/test/syntaxes/namespaces.test.syntax.ts +++ b/test/syntaxes/namespaces.test.syntax.ts @@ -4,7 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { should } from 'chai'; -import { tokenize, Tokens } from './utils/tokenizer'; +import { tokenize, Token } from 
'./utils/tokenize'; describe("Grammar", () => { before(() => should()); @@ -19,10 +19,10 @@ namespace TestNamespace let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Namespace, - Tokens.Identifiers.NamespaceName("TestNamespace"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Puncuation.CurlyBrace.Close]); + Token.Keywords.Namespace, + Token.Identifiers.NamespaceName("TestNamespace"), + Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.CurlyBrace.Close]); }); it("has a namespace keyword and a dotted name", () => { @@ -34,12 +34,12 @@ namespace Test.Namespace let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Namespace, - Tokens.Identifiers.NamespaceName("Test"), - Tokens.Puncuation.Accessor, - Tokens.Identifiers.NamespaceName("Namespace"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Puncuation.CurlyBrace.Close]); + Token.Keywords.Namespace, + Token.Identifiers.NamespaceName("Test"), + Token.Puncuation.Accessor, + Token.Identifiers.NamespaceName("Namespace"), + Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.CurlyBrace.Close]); }); it("can be nested", () => { @@ -54,16 +54,16 @@ namespace TestNamespace let tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Namespace, - Tokens.Identifiers.NamespaceName("TestNamespace"), - Tokens.Puncuation.CurlyBrace.Open, + Token.Keywords.Namespace, + Token.Identifiers.NamespaceName("TestNamespace"), + Token.Puncuation.CurlyBrace.Open, - Tokens.Keywords.Namespace, - Tokens.Identifiers.NamespaceName("NestedNamespace"), - Tokens.Puncuation.CurlyBrace.Open, + Token.Keywords.Namespace, + Token.Identifiers.NamespaceName("NestedNamespace"), + Token.Puncuation.CurlyBrace.Open, - Tokens.Puncuation.CurlyBrace.Close, - Tokens.Puncuation.CurlyBrace.Close]); + Token.Puncuation.CurlyBrace.Close, + Token.Puncuation.CurlyBrace.Close]); }); it("can contain using statements", () => { @@ -86,52 +86,52 @@ namespace TestNamespace let tokens = tokenize(input); 
tokens.should.deep.equal([ - Tokens.Keywords.Using, - Tokens.Identifiers.NamespaceName("UsingOne"), - Tokens.Puncuation.Semicolon, - - Tokens.Keywords.Using, - Tokens.Identifiers.AliasName("one"), - Tokens.Operators.Assignment, - Tokens.Type("UsingOne"), - Tokens.Puncuation.Accessor, - Tokens.Type("Something"), - Tokens.Puncuation.Semicolon, - - Tokens.Keywords.Namespace, - Tokens.Identifiers.NamespaceName("TestNamespace"), - Tokens.Puncuation.CurlyBrace.Open, - - Tokens.Keywords.Using, - Tokens.Identifiers.NamespaceName("UsingTwo"), - Tokens.Puncuation.Semicolon, - - Tokens.Keywords.Using, - Tokens.Identifiers.AliasName("two"), - Tokens.Operators.Assignment, - Tokens.Type("UsingTwo"), - Tokens.Puncuation.Accessor, - Tokens.Type("Something"), - Tokens.Puncuation.Semicolon, - - Tokens.Keywords.Namespace, - Tokens.Identifiers.NamespaceName("NestedNamespace"), - Tokens.Puncuation.CurlyBrace.Open, - - Tokens.Keywords.Using, - Tokens.Identifiers.NamespaceName("UsingThree"), - Tokens.Puncuation.Semicolon, - - Tokens.Keywords.Using, - Tokens.Identifiers.AliasName("three"), - Tokens.Operators.Assignment, - Tokens.Type("UsingThree"), - Tokens.Puncuation.Accessor, - Tokens.Type("Something"), - Tokens.Puncuation.Semicolon, - - Tokens.Puncuation.CurlyBrace.Close, - Tokens.Puncuation.CurlyBrace.Close]); + Token.Keywords.Using, + Token.Identifiers.NamespaceName("UsingOne"), + Token.Puncuation.Semicolon, + + Token.Keywords.Using, + Token.Identifiers.AliasName("one"), + Token.Operators.Assignment, + Token.Type("UsingOne"), + Token.Puncuation.Accessor, + Token.Type("Something"), + Token.Puncuation.Semicolon, + + Token.Keywords.Namespace, + Token.Identifiers.NamespaceName("TestNamespace"), + Token.Puncuation.CurlyBrace.Open, + + Token.Keywords.Using, + Token.Identifiers.NamespaceName("UsingTwo"), + Token.Puncuation.Semicolon, + + Token.Keywords.Using, + Token.Identifiers.AliasName("two"), + Token.Operators.Assignment, + Token.Type("UsingTwo"), + Token.Puncuation.Accessor, + 
Token.Type("Something"), + Token.Puncuation.Semicolon, + + Token.Keywords.Namespace, + Token.Identifiers.NamespaceName("NestedNamespace"), + Token.Puncuation.CurlyBrace.Open, + + Token.Keywords.Using, + Token.Identifiers.NamespaceName("UsingThree"), + Token.Puncuation.Semicolon, + + Token.Keywords.Using, + Token.Identifiers.AliasName("three"), + Token.Operators.Assignment, + Token.Type("UsingThree"), + Token.Puncuation.Accessor, + Token.Type("Something"), + Token.Puncuation.Semicolon, + + Token.Puncuation.CurlyBrace.Close, + Token.Puncuation.CurlyBrace.Close]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/numeric-literals.test.syntax.ts b/test/syntaxes/numeric-literals.test.syntax.ts index 8e9ce7b84b..c0c6467a83 100644 --- a/test/syntaxes/numeric-literals.test.syntax.ts +++ b/test/syntaxes/numeric-literals.test.syntax.ts @@ -4,7 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { should } from 'chai'; -import { tokenize, Input, Tokens } from './utils/tokenizer'; +import { tokenize, Input, Token } from './utils/tokenize'; describe("Grammar", () => { before(() => should()); @@ -16,11 +16,11 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Type("int"), - Tokens.Identifiers.FieldName("x"), - Tokens.Operators.Assignment, - Tokens.Literals.Numeric.Decimal("0"), - Tokens.Puncuation.Semicolon]); + Token.Type("int"), + Token.Identifiers.FieldName("x"), + Token.Operators.Assignment, + Token.Literals.Numeric.Decimal("0"), + Token.Puncuation.Semicolon]); }); it("hexadecimal zero", () => { @@ -29,11 +29,11 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Type("int"), - Tokens.Identifiers.FieldName("x"), - Tokens.Operators.Assignment, - Tokens.Literals.Numeric.Hexadecimal("0x0"), - Tokens.Puncuation.Semicolon]); + Token.Type("int"), + Token.Identifiers.FieldName("x"), + Token.Operators.Assignment, + 
Token.Literals.Numeric.Hexadecimal("0x0"), + Token.Puncuation.Semicolon]); }); it("binary zero", () => { @@ -42,11 +42,11 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Type("int"), - Tokens.Identifiers.FieldName("x"), - Tokens.Operators.Assignment, - Tokens.Literals.Numeric.Binary("0b0"), - Tokens.Puncuation.Semicolon]); + Token.Type("int"), + Token.Identifiers.FieldName("x"), + Token.Operators.Assignment, + Token.Literals.Numeric.Binary("0b0"), + Token.Puncuation.Semicolon]); }); it("floating-point zero", () => { @@ -55,11 +55,11 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Type("float"), - Tokens.Identifiers.FieldName("x"), - Tokens.Operators.Assignment, - Tokens.Literals.Numeric.Decimal("0.0"), - Tokens.Puncuation.Semicolon]); + Token.Type("float"), + Token.Identifiers.FieldName("x"), + Token.Operators.Assignment, + Token.Literals.Numeric.Decimal("0.0"), + Token.Puncuation.Semicolon]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/properties.test.syntax.ts b/test/syntaxes/properties.test.syntax.ts index a35b797e43..95cb991a27 100644 --- a/test/syntaxes/properties.test.syntax.ts +++ b/test/syntaxes/properties.test.syntax.ts @@ -4,7 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { should } from 'chai'; -import { tokenize, Input, Tokens } from './utils/tokenizer'; +import { tokenize, Input, Token } from './utils/tokenize'; describe("Grammar", () => { before(() => should()); @@ -21,24 +21,24 @@ public IBooom Property const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public, - Tokens.Type("IBooom"), - Tokens.Identifiers.PropertyName("Property"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Keywords.Get, - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Keywords.Return, - Tokens.Literals.Null, - Tokens.Puncuation.Semicolon, - 
Tokens.Puncuation.CurlyBrace.Close, - Tokens.Keywords.Set, - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Variables.ReadWrite("something"), - Tokens.Operators.Assignment, - Tokens.Variables.ReadWrite("value"), - Tokens.Puncuation.Semicolon, - Tokens.Puncuation.CurlyBrace.Close, - Tokens.Puncuation.CurlyBrace.Close]); + Token.Keywords.Modifiers.Public, + Token.Type("IBooom"), + Token.Identifiers.PropertyName("Property"), + Token.Puncuation.CurlyBrace.Open, + Token.Keywords.Get, + Token.Puncuation.CurlyBrace.Open, + Token.Keywords.Return, + Token.Literals.Null, + Token.Puncuation.Semicolon, + Token.Puncuation.CurlyBrace.Close, + Token.Keywords.Set, + Token.Puncuation.CurlyBrace.Open, + Token.Variables.ReadWrite("something"), + Token.Operators.Assignment, + Token.Variables.ReadWrite("value"), + Token.Puncuation.Semicolon, + Token.Puncuation.CurlyBrace.Close, + Token.Puncuation.CurlyBrace.Close]); }); it("declaration single line", () => { @@ -47,25 +47,25 @@ public IBooom Property const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public, - Tokens.Type("IBooom"), - Tokens.Identifiers.PropertyName("Property"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Keywords.Get, - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Keywords.Return, - Tokens.Literals.Null, - Tokens.Puncuation.Semicolon, - Tokens.Puncuation.CurlyBrace.Close, - Tokens.Keywords.Modifiers.Private, - Tokens.Keywords.Set, - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Variables.ReadWrite("something"), - Tokens.Operators.Assignment, - Tokens.Variables.ReadWrite("value"), - Tokens.Puncuation.Semicolon, - Tokens.Puncuation.CurlyBrace.Close, - Tokens.Puncuation.CurlyBrace.Close]); + Token.Keywords.Modifiers.Public, + Token.Type("IBooom"), + Token.Identifiers.PropertyName("Property"), + Token.Puncuation.CurlyBrace.Open, + Token.Keywords.Get, + Token.Puncuation.CurlyBrace.Open, + Token.Keywords.Return, + Token.Literals.Null, + Token.Puncuation.Semicolon, + 
Token.Puncuation.CurlyBrace.Close, + Token.Keywords.Modifiers.Private, + Token.Keywords.Set, + Token.Puncuation.CurlyBrace.Open, + Token.Variables.ReadWrite("something"), + Token.Operators.Assignment, + Token.Variables.ReadWrite("value"), + Token.Puncuation.Semicolon, + Token.Puncuation.CurlyBrace.Close, + Token.Puncuation.CurlyBrace.Close]); }); it("declaration without modifiers", () => { @@ -74,14 +74,14 @@ public IBooom Property const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Type("IBooom"), - Tokens.Identifiers.PropertyName("Property"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Keywords.Get, - Tokens.Puncuation.Semicolon, - Tokens.Keywords.Set, - Tokens.Puncuation.Semicolon, - Tokens.Puncuation.CurlyBrace.Close]); + Token.Type("IBooom"), + Token.Identifiers.PropertyName("Property"), + Token.Puncuation.CurlyBrace.Open, + Token.Keywords.Get, + Token.Puncuation.Semicolon, + Token.Keywords.Set, + Token.Puncuation.Semicolon, + Token.Puncuation.CurlyBrace.Close]); }); it("auto-property single line", function () { @@ -90,15 +90,15 @@ public IBooom Property const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public, - Tokens.Type("IBooom"), - Tokens.Identifiers.PropertyName("Property"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Keywords.Get, - Tokens.Puncuation.Semicolon, - Tokens.Keywords.Set, - Tokens.Puncuation.Semicolon, - Tokens.Puncuation.CurlyBrace.Close]); + Token.Keywords.Modifiers.Public, + Token.Type("IBooom"), + Token.Identifiers.PropertyName("Property"), + Token.Puncuation.CurlyBrace.Open, + Token.Keywords.Get, + Token.Puncuation.Semicolon, + Token.Keywords.Set, + Token.Puncuation.Semicolon, + Token.Puncuation.CurlyBrace.Close]); }); it("auto-property single line (protected internal)", function () { @@ -107,16 +107,16 @@ public IBooom Property const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Protected, - Tokens.Keywords.Modifiers.Internal, - 
Tokens.Type("IBooom"), - Tokens.Identifiers.PropertyName("Property"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Keywords.Get, - Tokens.Puncuation.Semicolon, - Tokens.Keywords.Set, - Tokens.Puncuation.Semicolon, - Tokens.Puncuation.CurlyBrace.Close]); + Token.Keywords.Modifiers.Protected, + Token.Keywords.Modifiers.Internal, + Token.Type("IBooom"), + Token.Identifiers.PropertyName("Property"), + Token.Puncuation.CurlyBrace.Open, + Token.Keywords.Get, + Token.Puncuation.Semicolon, + Token.Keywords.Set, + Token.Puncuation.Semicolon, + Token.Puncuation.CurlyBrace.Close]); }); it("auto-property", () => { @@ -130,15 +130,15 @@ public IBooom Property const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public, - Tokens.Type("IBooom"), - Tokens.Identifiers.PropertyName("Property"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Keywords.Get, - Tokens.Puncuation.Semicolon, - Tokens.Keywords.Set, - Tokens.Puncuation.Semicolon, - Tokens.Puncuation.CurlyBrace.Close]); + Token.Keywords.Modifiers.Public, + Token.Type("IBooom"), + Token.Identifiers.PropertyName("Property"), + Token.Puncuation.CurlyBrace.Open, + Token.Keywords.Get, + Token.Puncuation.Semicolon, + Token.Keywords.Set, + Token.Puncuation.Semicolon, + Token.Puncuation.CurlyBrace.Close]); }); it("generic auto-property", () => { @@ -147,25 +147,25 @@ public IBooom Property const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public, - Tokens.Type("Dictionary"), - Tokens.Puncuation.TypeParameters.Begin, - Tokens.Type("string"), - Tokens.Puncuation.Comma, - Tokens.Type("List"), - Tokens.Puncuation.TypeParameters.Begin, - Tokens.Type("T"), - Tokens.Puncuation.TypeParameters.End, - Tokens.Puncuation.SquareBracket.Open, - Tokens.Puncuation.SquareBracket.Close, - Tokens.Puncuation.TypeParameters.End, - Tokens.Identifiers.PropertyName("Property"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Keywords.Get, - Tokens.Puncuation.Semicolon, - 
Tokens.Keywords.Set, - Tokens.Puncuation.Semicolon, - Tokens.Puncuation.CurlyBrace.Close]); + Token.Keywords.Modifiers.Public, + Token.Type("Dictionary"), + Token.Puncuation.TypeParameters.Begin, + Token.Type("string"), + Token.Puncuation.Comma, + Token.Type("List"), + Token.Puncuation.TypeParameters.Begin, + Token.Type("T"), + Token.Puncuation.TypeParameters.End, + Token.Puncuation.SquareBracket.Open, + Token.Puncuation.SquareBracket.Close, + Token.Puncuation.TypeParameters.End, + Token.Identifiers.PropertyName("Property"), + Token.Puncuation.CurlyBrace.Open, + Token.Keywords.Get, + Token.Puncuation.Semicolon, + Token.Keywords.Set, + Token.Puncuation.Semicolon, + Token.Puncuation.CurlyBrace.Close]); }); it("auto-property initializer", () => { @@ -174,39 +174,39 @@ public IBooom Property const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Public, - Tokens.Type("Dictionary"), - Tokens.Puncuation.TypeParameters.Begin, - Tokens.Type("string"), - Tokens.Puncuation.Comma, - Tokens.Type("List"), - Tokens.Puncuation.TypeParameters.Begin, - Tokens.Type("T"), - Tokens.Puncuation.TypeParameters.End, - Tokens.Puncuation.SquareBracket.Open, - Tokens.Puncuation.SquareBracket.Close, - Tokens.Puncuation.TypeParameters.End, - Tokens.Identifiers.PropertyName("Property"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Keywords.Get, - Tokens.Puncuation.Semicolon, - Tokens.Puncuation.CurlyBrace.Close, - Tokens.Operators.Assignment, - Tokens.Keywords.New, - Tokens.Type("Dictionary"), - Tokens.Puncuation.TypeParameters.Begin, - Tokens.Type("string"), - Tokens.Puncuation.Comma, - Tokens.Type("List"), - Tokens.Puncuation.TypeParameters.Begin, - Tokens.Type("T"), - Tokens.Puncuation.TypeParameters.End, - Tokens.Puncuation.SquareBracket.Open, - Tokens.Puncuation.SquareBracket.Close, - Tokens.Puncuation.TypeParameters.End, - Tokens.Puncuation.Parenthesis.Open, - Tokens.Puncuation.Parenthesis.Close, - Tokens.Puncuation.Semicolon]); + 
Token.Keywords.Modifiers.Public, + Token.Type("Dictionary"), + Token.Puncuation.TypeParameters.Begin, + Token.Type("string"), + Token.Puncuation.Comma, + Token.Type("List"), + Token.Puncuation.TypeParameters.Begin, + Token.Type("T"), + Token.Puncuation.TypeParameters.End, + Token.Puncuation.SquareBracket.Open, + Token.Puncuation.SquareBracket.Close, + Token.Puncuation.TypeParameters.End, + Token.Identifiers.PropertyName("Property"), + Token.Puncuation.CurlyBrace.Open, + Token.Keywords.Get, + Token.Puncuation.Semicolon, + Token.Puncuation.CurlyBrace.Close, + Token.Operators.Assignment, + Token.Keywords.New, + Token.Type("Dictionary"), + Token.Puncuation.TypeParameters.Begin, + Token.Type("string"), + Token.Puncuation.Comma, + Token.Type("List"), + Token.Puncuation.TypeParameters.Begin, + Token.Type("T"), + Token.Puncuation.TypeParameters.End, + Token.Puncuation.SquareBracket.Open, + Token.Puncuation.SquareBracket.Close, + Token.Puncuation.TypeParameters.End, + Token.Puncuation.Parenthesis.Open, + Token.Puncuation.Parenthesis.Close, + Token.Puncuation.Semicolon]); }); it("expression body", () => { @@ -217,21 +217,21 @@ private bool prop2 => true;`); const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Modifiers.Private, - Tokens.Type("string"), - Tokens.Identifiers.PropertyName("prop1"), - Tokens.Operators.Arrow, - Tokens.Puncuation.String.Begin, - Tokens.Literals.String("hello"), - Tokens.Puncuation.String.End, - Tokens.Puncuation.Semicolon, - - Tokens.Keywords.Modifiers.Private, - Tokens.Type("bool"), - Tokens.Identifiers.PropertyName("prop2"), - Tokens.Operators.Arrow, - Tokens.Literals.Boolean.True, - Tokens.Puncuation.Semicolon]); + Token.Keywords.Modifiers.Private, + Token.Type("string"), + Token.Identifiers.PropertyName("prop1"), + Token.Operators.Arrow, + Token.Puncuation.String.Begin, + Token.Literals.String("hello"), + Token.Puncuation.String.End, + Token.Puncuation.Semicolon, + + Token.Keywords.Modifiers.Private, + 
Token.Type("bool"), + Token.Identifiers.PropertyName("prop2"), + Token.Operators.Arrow, + Token.Literals.Boolean.True, + Token.Puncuation.Semicolon]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/structs.test.syntax.ts b/test/syntaxes/structs.test.syntax.ts index ae4e1eea0b..f64235b736 100644 --- a/test/syntaxes/structs.test.syntax.ts +++ b/test/syntaxes/structs.test.syntax.ts @@ -4,7 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { should } from 'chai'; -import { tokenize, Tokens } from './utils/tokenizer'; +import { tokenize, Token } from './utils/tokenize'; describe("Grammar", () => { before(() => should()); @@ -16,10 +16,10 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Struct, - Tokens.Identifiers.StructName("S"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Puncuation.CurlyBrace.Close]); + Token.Keywords.Struct, + Token.Identifiers.StructName("S"), + Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.CurlyBrace.Close]); }); it("struct interface implementation", () => { @@ -31,16 +31,16 @@ struct S : IFoo { } const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Interface, - Tokens.Identifiers.InterfaceName("IFoo"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Puncuation.CurlyBrace.Close, - Tokens.Keywords.Struct, - Tokens.Identifiers.StructName("S"), - Tokens.Puncuation.Colon, - Tokens.Type("IFoo"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Puncuation.CurlyBrace.Close]); + Token.Keywords.Interface, + Token.Identifiers.InterfaceName("IFoo"), + Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.CurlyBrace.Close, + Token.Keywords.Struct, + Token.Identifiers.StructName("S"), + Token.Puncuation.Colon, + Token.Type("IFoo"), + Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.CurlyBrace.Close]); }); it("generic struct", () => { @@ -51,10 +51,10 @@ struct S { } const tokens = 
tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Struct, - Tokens.Identifiers.StructName("S"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Puncuation.CurlyBrace.Close]); + Token.Keywords.Struct, + Token.Identifiers.StructName("S"), + Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.CurlyBrace.Close]); }); it("generic struct with constraints", () => { @@ -65,14 +65,14 @@ struct S where T1 : T2 { } const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Struct, - Tokens.Identifiers.StructName("S"), - Tokens.Keywords.Where, - Tokens.Type("T1"), - Tokens.Puncuation.Colon, - Tokens.Type("T2"), - Tokens.Puncuation.CurlyBrace.Open, - Tokens.Puncuation.CurlyBrace.Close]); + Token.Keywords.Struct, + Token.Identifiers.StructName("S"), + Token.Keywords.Where, + Token.Type("T1"), + Token.Puncuation.Colon, + Token.Type("T2"), + Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.CurlyBrace.Close]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/using-directives.test.syntax.ts b/test/syntaxes/using-directives.test.syntax.ts index 070e4c5a59..0d43f204c6 100644 --- a/test/syntaxes/using-directives.test.syntax.ts +++ b/test/syntaxes/using-directives.test.syntax.ts @@ -4,7 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { should } from 'chai'; -import { tokenize, Tokens } from './utils/tokenizer'; +import { tokenize, Token } from './utils/tokenize'; describe("Grammar", () => { before(() => should()); @@ -16,9 +16,9 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Using, - Tokens.Identifiers.NamespaceName("System"), - Tokens.Puncuation.Semicolon]); + Token.Keywords.Using, + Token.Identifiers.NamespaceName("System"), + Token.Puncuation.Semicolon]); }); it("using static type", () => { @@ -27,12 +27,12 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - 
Tokens.Keywords.Using, - Tokens.Keywords.Static, - Tokens.Type("System"), - Tokens.Puncuation.Accessor, - Tokens.Type("Console"), - Tokens.Puncuation.Semicolon]); + Token.Keywords.Using, + Token.Keywords.Static, + Token.Type("System"), + Token.Puncuation.Accessor, + Token.Type("Console"), + Token.Puncuation.Semicolon]); }); it("namespace alias", () => { @@ -41,11 +41,11 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Using, - Tokens.Identifiers.AliasName("S"), - Tokens.Operators.Assignment, - Tokens.Type("System"), - Tokens.Puncuation.Semicolon]); + Token.Keywords.Using, + Token.Identifiers.AliasName("S"), + Token.Operators.Assignment, + Token.Type("System"), + Token.Puncuation.Semicolon]); }); it("type alias", () => { @@ -54,13 +54,13 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Using, - Tokens.Identifiers.AliasName("C"), - Tokens.Operators.Assignment, - Tokens.Type("System"), - Tokens.Puncuation.Accessor, - Tokens.Type("Console"), - Tokens.Puncuation.Semicolon]); + Token.Keywords.Using, + Token.Identifiers.AliasName("C"), + Token.Operators.Assignment, + Token.Type("System"), + Token.Puncuation.Accessor, + Token.Type("Console"), + Token.Puncuation.Semicolon]); }); it("type alias with generic type", () => { @@ -69,22 +69,22 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Using, - Tokens.Identifiers.AliasName("IntList"), - Tokens.Operators.Assignment, - Tokens.Type("System"), - Tokens.Puncuation.Accessor, - Tokens.Type("Collections"), - Tokens.Puncuation.Accessor, - Tokens.Type("Generic"), - Tokens.Puncuation.Accessor, - Tokens.Type("List"), - Tokens.Puncuation.TypeParameters.Begin, - Tokens.Type("System"), - Tokens.Puncuation.Accessor, - Tokens.Type("Int32"), - Tokens.Puncuation.TypeParameters.End, - Tokens.Puncuation.Semicolon]); + Token.Keywords.Using, + 
Token.Identifiers.AliasName("IntList"), + Token.Operators.Assignment, + Token.Type("System"), + Token.Puncuation.Accessor, + Token.Type("Collections"), + Token.Puncuation.Accessor, + Token.Type("Generic"), + Token.Puncuation.Accessor, + Token.Type("List"), + Token.Puncuation.TypeParameters.Begin, + Token.Type("System"), + Token.Puncuation.Accessor, + Token.Type("Int32"), + Token.Puncuation.TypeParameters.End, + Token.Puncuation.Semicolon]); }); it("type alias with nested generic types", () => { @@ -93,35 +93,35 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Using, - Tokens.Identifiers.AliasName("X"), - Tokens.Operators.Assignment, - Tokens.Type("System"), - Tokens.Puncuation.Accessor, - Tokens.Type("Collections"), - Tokens.Puncuation.Accessor, - Tokens.Type("Generic"), - Tokens.Puncuation.Accessor, - Tokens.Type("Dictionary"), - Tokens.Puncuation.TypeParameters.Begin, - Tokens.Type("System"), - Tokens.Puncuation.Accessor, - Tokens.Type("Int32"), - Tokens.Puncuation.Comma, - Tokens.Type("System"), - Tokens.Puncuation.Accessor, - Tokens.Type("Collections"), - Tokens.Puncuation.Accessor, - Tokens.Type("Generic"), - Tokens.Puncuation.Accessor, - Tokens.Type("List"), - Tokens.Puncuation.TypeParameters.Begin, - Tokens.Type("System"), - Tokens.Puncuation.Accessor, - Tokens.Type("String"), - Tokens.Puncuation.TypeParameters.End, - Tokens.Puncuation.TypeParameters.End, - Tokens.Puncuation.Semicolon]); + Token.Keywords.Using, + Token.Identifiers.AliasName("X"), + Token.Operators.Assignment, + Token.Type("System"), + Token.Puncuation.Accessor, + Token.Type("Collections"), + Token.Puncuation.Accessor, + Token.Type("Generic"), + Token.Puncuation.Accessor, + Token.Type("Dictionary"), + Token.Puncuation.TypeParameters.Begin, + Token.Type("System"), + Token.Puncuation.Accessor, + Token.Type("Int32"), + Token.Puncuation.Comma, + Token.Type("System"), + Token.Puncuation.Accessor, + Token.Type("Collections"), + 
Token.Puncuation.Accessor, + Token.Type("Generic"), + Token.Puncuation.Accessor, + Token.Type("List"), + Token.Puncuation.TypeParameters.Begin, + Token.Type("System"), + Token.Puncuation.Accessor, + Token.Type("String"), + Token.Puncuation.TypeParameters.End, + Token.Puncuation.TypeParameters.End, + Token.Puncuation.Semicolon]); }); it("type alias with nested generic types and comments interspersed", () => { @@ -130,41 +130,41 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Tokens.Keywords.Using, - Tokens.Identifiers.AliasName("X"), - Tokens.Operators.Assignment, - Tokens.Comment.MultiLine.Start, - Tokens.Comment.MultiLine.End, - Tokens.Type("Dictionary"), - Tokens.Comment.MultiLine.Start, - Tokens.Comment.MultiLine.End, - Tokens.Puncuation.TypeParameters.Begin, - Tokens.Comment.MultiLine.Start, - Tokens.Comment.MultiLine.End, - Tokens.Type("int"), - Tokens.Comment.MultiLine.Start, - Tokens.Comment.MultiLine.End, - Tokens.Puncuation.Comma, - Tokens.Comment.MultiLine.Start, - Tokens.Comment.MultiLine.End, - Tokens.Type("List"), - Tokens.Comment.MultiLine.Start, - Tokens.Comment.MultiLine.End, - Tokens.Puncuation.TypeParameters.Begin, - Tokens.Comment.MultiLine.Start, - Tokens.Comment.MultiLine.End, - Tokens.Type("string"), - Tokens.Comment.MultiLine.Start, - Tokens.Comment.MultiLine.End, - Tokens.Puncuation.TypeParameters.End, - Tokens.Comment.MultiLine.Start, - Tokens.Comment.MultiLine.End, - Tokens.Puncuation.TypeParameters.End, - Tokens.Comment.MultiLine.Start, - Tokens.Comment.MultiLine.End, - Tokens.Puncuation.Semicolon, - Tokens.Comment.SingleLine.Start, - Tokens.Comment.SingleLine.Text("end")]); + Token.Keywords.Using, + Token.Identifiers.AliasName("X"), + Token.Operators.Assignment, + Token.Comment.MultiLine.Start, + Token.Comment.MultiLine.End, + Token.Type("Dictionary"), + Token.Comment.MultiLine.Start, + Token.Comment.MultiLine.End, + Token.Puncuation.TypeParameters.Begin, + Token.Comment.MultiLine.Start, + 
Token.Comment.MultiLine.End, + Token.Type("int"), + Token.Comment.MultiLine.Start, + Token.Comment.MultiLine.End, + Token.Puncuation.Comma, + Token.Comment.MultiLine.Start, + Token.Comment.MultiLine.End, + Token.Type("List"), + Token.Comment.MultiLine.Start, + Token.Comment.MultiLine.End, + Token.Puncuation.TypeParameters.Begin, + Token.Comment.MultiLine.Start, + Token.Comment.MultiLine.End, + Token.Type("string"), + Token.Comment.MultiLine.Start, + Token.Comment.MultiLine.End, + Token.Puncuation.TypeParameters.End, + Token.Comment.MultiLine.Start, + Token.Comment.MultiLine.End, + Token.Puncuation.TypeParameters.End, + Token.Comment.MultiLine.Start, + Token.Comment.MultiLine.End, + Token.Puncuation.Semicolon, + Token.Comment.SingleLine.Start, + Token.Comment.SingleLine.Text("end")]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/utils/tokenizer.ts b/test/syntaxes/utils/tokenize.ts similarity index 91% rename from test/syntaxes/utils/tokenizer.ts rename to test/syntaxes/utils/tokenize.ts index 23a53ba49b..9f5c47d7dd 100644 --- a/test/syntaxes/utils/tokenizer.ts +++ b/test/syntaxes/utils/tokenize.ts @@ -1,278 +1,275 @@ -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. 
- *--------------------------------------------------------------------------------------------*/ - -import { ITokenizeLineResult, Registry, StackElement } from 'vscode-textmate'; - -const registry = new Registry(); -const grammar = registry.loadGrammarFromPathSync('syntaxes/csharp-new.json'); -const excludedTypes = ['source.cs', 'meta.interpolation.cs', 'meta.type.parameters.cs'] - -export function tokenize(input: string | Input, excludeTypes: boolean = true): Token[] { - if (typeof input === "string") { - input = Input.FromText(input); - } - - let tokens: Token[] = []; - let previousStack: StackElement = null; - - for (let lineIndex = 0; lineIndex < input.lines.length; lineIndex++) { - const line = input.lines[lineIndex]; - - let lineResult = grammar.tokenizeLine(line, previousStack); - previousStack = lineResult.ruleStack; - - if (lineIndex < input.span.startLine || lineIndex > input.span.endLine) { - continue; - } - - for (const token of lineResult.tokens) { - if ((lineIndex === input.span.startLine && token.startIndex < input.span.startIndex) || - (lineIndex === input.span.endLine && token.endIndex > input.span.endIndex)) { - continue; - } - - const text = line.substring(token.startIndex, token.endIndex); - const type = token.scopes[token.scopes.length - 1]; - - if (excludeTypes === false || excludedTypes.indexOf(type) < 0) { - tokens.push(new Token(text, type)); - } - } - } - - return tokens; -} - -export class Span { - constructor( - public startLine: number, - public startIndex: number, - public endLine: number, - public endIndex: number) { } -} - -export class Input { - private constructor( - public lines: string[], - public span: Span) { } - - public static FromText(text: string) { - // ensure consistent line-endings irrelevant of OS - text = text.replace('\r\n', '\n'); - let lines = text.split('\n'); - - return new Input(lines, new Span(0, 0, lines.length - 1, lines[lines.length - 1].length)); - } - - public static InClass(input: string) { - let text = ` 
-class TestClass { - ${input} -}`; - - // ensure consistent line-endings irrelevant of OS - text = text.replace('\r\n', '\n'); - let lines = text.split('\n'); - - return new Input(lines, new Span(2, 4, lines.length - 1, 0)); - } - - public static InMethod(input: string) { - let text = ` -class TestClass { - void TestMethod() { - ${input} - } -}`; - - // ensure consistent line-endings irrelevant of OS - text = text.replace('\r\n', '\n'); - let lines = text.split('\n'); - - return new Input(lines, new Span(3, 8, lines.length - 2, 0)); - } - - public static InNamespace(input: string) { - let text = ` -namespace TestNamespace { - ${input} -}`; - - // ensure consistent line-endings irrelevant of OS - text = text.replace('\r\n', '\n'); - let lines = text.split('\n'); - - return new Input(lines, new Span(2, 4, lines.length - 1, 0)); - } -} - -export class Token { - constructor( - public text: string, - public type: string) { } -} - -export namespace Tokens { - - function createToken(text: string, type: string): Token { - return new Token(text, type); - } - - export namespace Comment { - export const LeadingWhitespace = (text: string) => createToken(text, 'punctuation.whitespace.comment.leading.cs'); - - export namespace MultiLine { - export const End = createToken('*/', 'punctuation.definition.comment.cs'); - export const Start = createToken('/*', 'punctuation.definition.comment.cs'); - - export const Text = (text: string) => createToken(text, 'comment.block.cs'); - } - - export namespace SingleLine { - export const Start = createToken('//', 'punctuation.definition.comment.cs'); - - export const Text = (text: string) => createToken(text, 'comment.line.double-slash.cs'); - } - } - - export namespace Identifiers { - export const AliasName = (text: string) => createToken(text, 'entity.name.type.alias.cs'); - export const ClassName = (text: string) => createToken(text, 'entity.name.type.class.cs'); - export const DelegateName = (text: string) => createToken(text, 
'entity.name.type.delegate.cs'); - export const EnumName = (text: string) => createToken(text, 'entity.name.type.enum.cs'); - export const EventName = (text: string) => createToken(text, 'entity.name.variable.event.cs'); - export const FieldName = (text: string) => createToken(text, 'entity.name.variable.field.cs'); - export const InterfaceName = (text: string) => createToken(text, 'entity.name.type.interface.cs'); - export const MethodName = (text: string) => createToken(text, 'entity.name.function.cs'); - export const NamespaceName = (text: string) => createToken(text, 'entity.name.type.namespace.cs'); - export const PropertyName = (text: string) => createToken(text, 'entity.name.variable.property.cs'); - export const StructName = (text: string) => createToken(text, 'entity.name.type.struct.cs'); - } - - export namespace Keywords { - export namespace Modifiers { - export const Abstract = createToken('abstract', 'storage.modifier.cs'); - export const Const = createToken('const', 'storage.modifier.cs'); - export const Internal = createToken('internal', 'storage.modifier.cs'); - export const New = createToken('new', 'storage.modifier.cs'); - export const Out = createToken('out', 'storage.modifier.cs'); - export const Params = createToken('params', 'storage.modifier.cs'); - export const Partial = createToken('partial', 'storage.modifier.cs'); - export const Private = createToken('private', 'storage.modifier.cs'); - export const Protected = createToken('protected', 'storage.modifier.cs'); - export const Public = createToken('public', 'storage.modifier.cs'); - export const ReadOnly = createToken('readonly', 'storage.modifier.cs'); - export const Ref = createToken('ref', 'storage.modifier.cs'); - export const Sealed = createToken('sealed', 'storage.modifier.cs'); - export const Static = createToken('static', 'storage.modifier.cs'); - } - - export const Add = createToken('add', 'keyword.other.add.cs'); - export const Alias = createToken('alias', 
'keyword.other.alias.cs'); - export const AttributeSpecifier = (text: string) => createToken(text, 'keyword.other.attribute-specifier.cs'); - export const Class = createToken('class', 'keyword.other.class.cs'); - export const Delegate = createToken('delegate', 'keyword.other.delegate.cs'); - export const Enum = createToken('enum', 'keyword.other.enum.cs'); - export const Event = createToken('event', 'keyword.other.event.cs'); - export const Extern = createToken('extern', 'keyword.other.extern.cs'); - export const Get = createToken('get', 'keyword.other.get.cs'); - export const Interface = createToken('interface', 'keyword.other.interface.cs'); - export const Namespace = createToken('namespace', 'keyword.other.namespace.cs'); - export const New = createToken('new', 'keyword.other.new.cs'); - export const Remove = createToken('remove', 'keyword.other.remove.cs'); - export const Return = createToken('return', 'keyword.control.flow.cs'); - export const Set = createToken('set', 'keyword.other.set.cs'); - export const Static = createToken('static', 'keyword.other.static.cs'); - export const Struct = createToken('struct', 'keyword.other.struct.cs'); - export const This = createToken('this', 'keyword.other.this.cs'); - export const Using = createToken('using', 'keyword.other.using.cs'); - export const Where = createToken('where', 'keyword.other.where.cs'); - } - - export namespace Literals { - export namespace Boolean { - export const False = createToken('false', 'constant.language.boolean.false.cs'); - export const True = createToken('true', 'constant.language.boolean.true.cs'); - } - - export const Null = createToken('null', 'constant.language.null.cs'); - - export namespace Numeric { - export const Binary = (text: string) => createToken(text, 'constant.numeric.binary.cs'); - export const Decimal = (text: string) => createToken(text, 'constant.numeric.decimal.cs'); - export const Hexadecimal = (text: string) => createToken(text, 'constant.numeric.hex.cs'); - } - - export 
const String = (text: string) => createToken(text, 'string.quoted.double.cs'); - } - - export namespace Operators { - export const Arrow = createToken('=>', 'keyword.operator.arrow.cs'); - - export namespace Arithmetic { - export const Addition = createToken('+', 'keyword.operator.arithmetic.cs'); - export const Division = createToken('/', 'keyword.operator.arithmetic.cs'); - export const Multiplication = createToken('*', 'keyword.operator.arithmetic.cs'); - export const Remainder = createToken('%', 'keyword.operator.arithmetic.cs'); - export const Subtraction = createToken('-', 'keyword.operator.arithmetic.cs'); - } - - export const Assignment = createToken('=', 'keyword.operator.assignment.cs'); - } - - export namespace Puncuation { - export const Accessor = createToken('.', 'punctuation.accessor.cs'); - export const Colon = createToken(':', 'punctuation.separator.colon.cs'); - export const Comma = createToken(',', 'punctuation.separator.comma.cs'); - - export namespace CurlyBrace { - export const Close = createToken('}', 'punctuation.curlybrace.close.cs'); - export const Open = createToken('{', 'punctuation.curlybrace.open.cs'); - } - - export namespace Interpolation { - export const Begin = createToken('{', 'punctuation.definition.interpolation.begin.cs'); - export const End = createToken('}', 'punctuation.definition.interpolation.end.cs'); - } - - export namespace InterpolatedString { - export const Begin = createToken('$"', 'punctuation.definition.string.begin.cs'); - export const End = createToken('"', 'punctuation.definition.string.end.cs'); - export const VerbatimBegin = createToken('$@"', 'punctuation.definition.string.begin.cs'); - } - - export namespace Parenthesis { - export const Close = createToken(')', 'punctuation.parenthesis.close.cs'); - export const Open = createToken('(', 'punctuation.parenthesis.open.cs'); - } - - export const Semicolon = createToken(';', 'punctuation.terminator.statement.cs'); - - export namespace SquareBracket { - export 
const Close = createToken(']', 'punctuation.squarebracket.close.cs'); - export const Open = createToken('[', 'punctuation.squarebracket.open.cs'); - } - - export namespace String { - export const Begin = createToken('"', 'punctuation.definition.string.begin.cs'); - export const End = createToken('"', 'punctuation.definition.string.end.cs'); - } - - export namespace TypeParameters { - export const Begin = createToken('<', 'punctuation.definition.typeparameters.begin.cs'); - export const End = createToken('>', 'punctuation.definition.typeparameters.end.cs'); - } - } - - export namespace Variables { - export const Alias = (text: string) => createToken(text, 'variable.other.alias.cs'); - export const EnumMember = (text: string) => createToken(text, 'variable.other.enummember.cs'); - export const Parameter = (text: string) => createToken(text, 'variable.parameter.cs'); - export const ReadWrite = (text: string) => createToken(text, 'variable.other.readwrite.cs'); - } - - export const IllegalNewLine = (text: string) => createToken(text, 'invalid.illegal.newline.cs'); - export const Type = (text: string) => createToken(text, 'storage.type.cs'); -} +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. 
+ *--------------------------------------------------------------------------------------------*/ + +import { ITokenizeLineResult, Registry, StackElement } from 'vscode-textmate'; + +const registry = new Registry(); +const grammar = registry.loadGrammarFromPathSync('syntaxes/csharp-new.json'); +const excludedTypes = ['source.cs', 'meta.interpolation.cs', 'meta.type.parameters.cs'] + +export function tokenize(input: string | Input, excludeTypes: boolean = true): Token[] { + if (typeof input === "string") { + input = Input.FromText(input); + } + + let tokens: Token[] = []; + let previousStack: StackElement = null; + + for (let lineIndex = 0; lineIndex < input.lines.length; lineIndex++) { + const line = input.lines[lineIndex]; + + let lineResult = grammar.tokenizeLine(line, previousStack); + previousStack = lineResult.ruleStack; + + if (lineIndex < input.span.startLine || lineIndex > input.span.endLine) { + continue; + } + + for (const token of lineResult.tokens) { + if ((lineIndex === input.span.startLine && token.startIndex < input.span.startIndex) || + (lineIndex === input.span.endLine && token.endIndex > input.span.endIndex)) { + continue; + } + + const text = line.substring(token.startIndex, token.endIndex); + const type = token.scopes[token.scopes.length - 1]; + + if (excludeTypes === false || excludedTypes.indexOf(type) < 0) { + tokens.push(createToken(text, type)); + } + } + } + + return tokens; +} + +interface Span { + startLine: number; + startIndex: number; + endLine: number; + endIndex: number; +} + +export class Input { + private constructor( + public lines: string[], + public span: Span) { } + + public static FromText(text: string) { + // ensure consistent line-endings irrelevant of OS + text = text.replace('\r\n', '\n'); + let lines = text.split('\n'); + + return new Input(lines, { startLine: 0, startIndex: 0, endLine: lines.length - 1, endIndex: lines[lines.length - 1].length }); + } + + public static InClass(input: string) { + let text = ` +class 
TestClass { + ${input} +}`; + + // ensure consistent line-endings irrelevant of OS + text = text.replace('\r\n', '\n'); + let lines = text.split('\n'); + + return new Input(lines, { startLine: 2, startIndex: 4, endLine: lines.length - 1, endIndex: 0 }); + } + + public static InMethod(input: string) { + let text = ` +class TestClass { + void TestMethod() { + ${input} + } +}`; + + // ensure consistent line-endings irrelevant of OS + text = text.replace('\r\n', '\n'); + let lines = text.split('\n'); + + return new Input(lines, { startLine: 3, startIndex: 8, endLine: lines.length - 2, endIndex: 0 }); + } + + public static InNamespace(input: string) { + let text = ` +namespace TestNamespace { + ${input} +}`; + + // ensure consistent line-endings irrelevant of OS + text = text.replace('\r\n', '\n'); + let lines = text.split('\n'); + + return new Input(lines, { startLine: 2, startIndex: 4, endLine: lines.length - 1, endIndex: 0 }); + } +} + +export interface Token { + text: string; + type: string; +} + +function createToken(text: string, type: string) { + return { text, type }; +} + +export namespace Token { + export namespace Comment { + export const LeadingWhitespace = (text: string) => createToken(text, 'punctuation.whitespace.comment.leading.cs'); + + export namespace MultiLine { + export const End = createToken('*/', 'punctuation.definition.comment.cs'); + export const Start = createToken('/*', 'punctuation.definition.comment.cs'); + + export const Text = (text: string) => createToken(text, 'comment.block.cs'); + } + + export namespace SingleLine { + export const Start = createToken('//', 'punctuation.definition.comment.cs'); + + export const Text = (text: string) => createToken(text, 'comment.line.double-slash.cs'); + } + } + + export namespace Identifiers { + export const AliasName = (text: string) => createToken(text, 'entity.name.type.alias.cs'); + export const ClassName = (text: string) => createToken(text, 'entity.name.type.class.cs'); + export const 
DelegateName = (text: string) => createToken(text, 'entity.name.type.delegate.cs'); + export const EnumName = (text: string) => createToken(text, 'entity.name.type.enum.cs'); + export const EventName = (text: string) => createToken(text, 'entity.name.variable.event.cs'); + export const FieldName = (text: string) => createToken(text, 'entity.name.variable.field.cs'); + export const InterfaceName = (text: string) => createToken(text, 'entity.name.type.interface.cs'); + export const MethodName = (text: string) => createToken(text, 'entity.name.function.cs'); + export const NamespaceName = (text: string) => createToken(text, 'entity.name.type.namespace.cs'); + export const PropertyName = (text: string) => createToken(text, 'entity.name.variable.property.cs'); + export const StructName = (text: string) => createToken(text, 'entity.name.type.struct.cs'); + } + + export namespace Keywords { + export namespace Modifiers { + export const Abstract = createToken('abstract', 'storage.modifier.cs'); + export const Const = createToken('const', 'storage.modifier.cs'); + export const Internal = createToken('internal', 'storage.modifier.cs'); + export const New = createToken('new', 'storage.modifier.cs'); + export const Out = createToken('out', 'storage.modifier.cs'); + export const Params = createToken('params', 'storage.modifier.cs'); + export const Partial = createToken('partial', 'storage.modifier.cs'); + export const Private = createToken('private', 'storage.modifier.cs'); + export const Protected = createToken('protected', 'storage.modifier.cs'); + export const Public = createToken('public', 'storage.modifier.cs'); + export const ReadOnly = createToken('readonly', 'storage.modifier.cs'); + export const Ref = createToken('ref', 'storage.modifier.cs'); + export const Sealed = createToken('sealed', 'storage.modifier.cs'); + export const Static = createToken('static', 'storage.modifier.cs'); + } + + export const Add = createToken('add', 'keyword.other.add.cs'); + export const 
Alias = createToken('alias', 'keyword.other.alias.cs'); + export const AttributeSpecifier = (text: string) => createToken(text, 'keyword.other.attribute-specifier.cs'); + export const Class = createToken('class', 'keyword.other.class.cs'); + export const Delegate = createToken('delegate', 'keyword.other.delegate.cs'); + export const Enum = createToken('enum', 'keyword.other.enum.cs'); + export const Event = createToken('event', 'keyword.other.event.cs'); + export const Extern = createToken('extern', 'keyword.other.extern.cs'); + export const Get = createToken('get', 'keyword.other.get.cs'); + export const Interface = createToken('interface', 'keyword.other.interface.cs'); + export const Namespace = createToken('namespace', 'keyword.other.namespace.cs'); + export const New = createToken('new', 'keyword.other.new.cs'); + export const Remove = createToken('remove', 'keyword.other.remove.cs'); + export const Return = createToken('return', 'keyword.control.flow.cs'); + export const Set = createToken('set', 'keyword.other.set.cs'); + export const Static = createToken('static', 'keyword.other.static.cs'); + export const Struct = createToken('struct', 'keyword.other.struct.cs'); + export const This = createToken('this', 'keyword.other.this.cs'); + export const Using = createToken('using', 'keyword.other.using.cs'); + export const Where = createToken('where', 'keyword.other.where.cs'); + } + + export namespace Literals { + export namespace Boolean { + export const False = createToken('false', 'constant.language.boolean.false.cs'); + export const True = createToken('true', 'constant.language.boolean.true.cs'); + } + + export const Null = createToken('null', 'constant.language.null.cs'); + + export namespace Numeric { + export const Binary = (text: string) => createToken(text, 'constant.numeric.binary.cs'); + export const Decimal = (text: string) => createToken(text, 'constant.numeric.decimal.cs'); + export const Hexadecimal = (text: string) => createToken(text, 
'constant.numeric.hex.cs'); + } + + export const String = (text: string) => createToken(text, 'string.quoted.double.cs'); + } + + export namespace Operators { + export const Arrow = createToken('=>', 'keyword.operator.arrow.cs'); + + export namespace Arithmetic { + export const Addition = createToken('+', 'keyword.operator.arithmetic.cs'); + export const Division = createToken('/', 'keyword.operator.arithmetic.cs'); + export const Multiplication = createToken('*', 'keyword.operator.arithmetic.cs'); + export const Remainder = createToken('%', 'keyword.operator.arithmetic.cs'); + export const Subtraction = createToken('-', 'keyword.operator.arithmetic.cs'); + } + + export const Assignment = createToken('=', 'keyword.operator.assignment.cs'); + } + + export namespace Puncuation { + export const Accessor = createToken('.', 'punctuation.accessor.cs'); + export const Colon = createToken(':', 'punctuation.separator.colon.cs'); + export const Comma = createToken(',', 'punctuation.separator.comma.cs'); + + export namespace CurlyBrace { + export const Close = createToken('}', 'punctuation.curlybrace.close.cs'); + export const Open = createToken('{', 'punctuation.curlybrace.open.cs'); + } + + export namespace Interpolation { + export const Begin = createToken('{', 'punctuation.definition.interpolation.begin.cs'); + export const End = createToken('}', 'punctuation.definition.interpolation.end.cs'); + } + + export namespace InterpolatedString { + export const Begin = createToken('$"', 'punctuation.definition.string.begin.cs'); + export const End = createToken('"', 'punctuation.definition.string.end.cs'); + export const VerbatimBegin = createToken('$@"', 'punctuation.definition.string.begin.cs'); + } + + export namespace Parenthesis { + export const Close = createToken(')', 'punctuation.parenthesis.close.cs'); + export const Open = createToken('(', 'punctuation.parenthesis.open.cs'); + } + + export const Semicolon = createToken(';', 'punctuation.terminator.statement.cs'); + + 
export namespace SquareBracket { + export const Close = createToken(']', 'punctuation.squarebracket.close.cs'); + export const Open = createToken('[', 'punctuation.squarebracket.open.cs'); + } + + export namespace String { + export const Begin = createToken('"', 'punctuation.definition.string.begin.cs'); + export const End = createToken('"', 'punctuation.definition.string.end.cs'); + } + + export namespace TypeParameters { + export const Begin = createToken('<', 'punctuation.definition.typeparameters.begin.cs'); + export const End = createToken('>', 'punctuation.definition.typeparameters.end.cs'); + } + } + + export namespace Variables { + export const Alias = (text: string) => createToken(text, 'variable.other.alias.cs'); + export const EnumMember = (text: string) => createToken(text, 'variable.other.enummember.cs'); + export const Parameter = (text: string) => createToken(text, 'variable.parameter.cs'); + export const ReadWrite = (text: string) => createToken(text, 'variable.other.readwrite.cs'); + } + + export const IllegalNewLine = (text: string) => createToken(text, 'invalid.illegal.newline.cs'); + export const Type = (text: string) => createToken(text, 'storage.type.cs'); +} From 5588e5b213eb89538f32073d53b1eb49a1f2821b Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Wed, 28 Dec 2016 14:28:01 -0800 Subject: [PATCH 043/192] Add while and do..while loops --- syntaxes/csharp-new.json | 29 +++++++++++++++++++ test/syntaxes/indexers.test.syntax.ts | 2 ++ .../interation-statements.test.syntax.ts | 28 ++++++++++++++++-- test/syntaxes/utils/tokenize.ts | 2 ++ 4 files changed, 58 insertions(+), 3 deletions(-) diff --git a/syntaxes/csharp-new.json b/syntaxes/csharp-new.json index a756948f41..caf866fefa 100644 --- a/syntaxes/csharp-new.json +++ b/syntaxes/csharp-new.json @@ -131,6 +131,9 @@ { "include": "#expression" }, + { + "include": "#block" + }, { "include": "#punctuation-semicolon" } @@ -153,6 +156,9 @@ { "include": "#object-creation-expression" }, + { + 
"include": "#parenthesized-expression" + }, { "include": "#identifier" } @@ -1128,6 +1134,10 @@ }, "control-statement": { "patterns": [ + { + "name": "keyword.control.loop.cs", + "match": "(? { before(() => should()); - describe.skip("Iteration statements (loops)", () => { - it("single-line declaration with no parameters", () => { - + describe("Iteration statements (loops)", () => { + it("single-line while loop", () => { const input = Input.InMethod(`while (true) { }`); const tokens = tokenize(input); tokens.should.deep.equal([ + Token.Keywords.While, + Token.Puncuation.Parenthesis.Open, + Token.Literals.Boolean.True, + Token.Puncuation.Parenthesis.Close, + Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.CurlyBrace.Close + ]); + }); + + it("single-line do..while loop", () => { + + const input = Input.InMethod(`do { } while (true);`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Do, + Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.CurlyBrace.Close, + Token.Keywords.While, + Token.Puncuation.Parenthesis.Open, + Token.Literals.Boolean.True, + Token.Puncuation.Parenthesis.Close, + Token.Puncuation.Semicolon ]); }); }); diff --git a/test/syntaxes/utils/tokenize.ts b/test/syntaxes/utils/tokenize.ts index 9f5c47d7dd..86104730a7 100644 --- a/test/syntaxes/utils/tokenize.ts +++ b/test/syntaxes/utils/tokenize.ts @@ -171,6 +171,7 @@ export namespace Token { export const AttributeSpecifier = (text: string) => createToken(text, 'keyword.other.attribute-specifier.cs'); export const Class = createToken('class', 'keyword.other.class.cs'); export const Delegate = createToken('delegate', 'keyword.other.delegate.cs'); + export const Do = createToken('do', 'keyword.control.loop.cs'); export const Enum = createToken('enum', 'keyword.other.enum.cs'); export const Event = createToken('event', 'keyword.other.event.cs'); export const Extern = createToken('extern', 'keyword.other.extern.cs'); @@ -186,6 +187,7 @@ export namespace Token { export 
const This = createToken('this', 'keyword.other.this.cs'); export const Using = createToken('using', 'keyword.other.using.cs'); export const Where = createToken('where', 'keyword.other.where.cs'); + export const While = createToken('while', 'keyword.control.loop.cs'); } export namespace Literals { From ffe53960493d4bdd132d721bd854f6c2cadfb55f Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Wed, 28 Dec 2016 14:32:13 -0800 Subject: [PATCH 044/192] Rename file --- ...tements.test.syntax.ts => iteration-statements.test.syntax.ts} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename test/syntaxes/{interation-statements.test.syntax.ts => iteration-statements.test.syntax.ts} (100%) diff --git a/test/syntaxes/interation-statements.test.syntax.ts b/test/syntaxes/iteration-statements.test.syntax.ts similarity index 100% rename from test/syntaxes/interation-statements.test.syntax.ts rename to test/syntaxes/iteration-statements.test.syntax.ts From 6710e7e757b36dda1134a95bc72a9b2a4ca1357f Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Wed, 28 Dec 2016 14:47:30 -0800 Subject: [PATCH 045/192] Shorten names for a few oft-used tokens --- test/syntaxes/attributes.test.syntax.ts | 72 +++++++++--------- test/syntaxes/classes.test.syntax.ts | 76 +++++++++---------- test/syntaxes/delegates.test.syntax.ts | 20 ++--- test/syntaxes/enums.test.syntax.ts | 20 ++--- test/syntaxes/events.test.syntax.ts | 12 +-- test/syntaxes/fields.test.syntax.ts | 4 +- test/syntaxes/indexers.test.syntax.ts | 16 ++-- test/syntaxes/interfaces.test.syntax.ts | 24 +++--- .../iteration-statements.test.syntax.ts | 16 ++-- test/syntaxes/methods.test.syntax.ts | 20 ++--- test/syntaxes/namespaces.test.syntax.ts | 24 +++--- test/syntaxes/properties.test.syntax.ts | 64 ++++++++-------- test/syntaxes/structs.test.syntax.ts | 20 ++--- test/syntaxes/utils/tokenize.ts | 21 ++--- 14 files changed, 200 insertions(+), 209 deletions(-) diff --git a/test/syntaxes/attributes.test.syntax.ts 
b/test/syntaxes/attributes.test.syntax.ts index 3a44f464c7..dffd409026 100644 --- a/test/syntaxes/attributes.test.syntax.ts +++ b/test/syntaxes/attributes.test.syntax.ts @@ -16,9 +16,9 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Puncuation.SquareBracket.Open, + Token.Puncuation.OpenBracket, Token.Type("Foo"), - Token.Puncuation.SquareBracket.Close]); + Token.Puncuation.CloseBracket]); }); it("global attribute with specifier", () => { @@ -27,11 +27,11 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Puncuation.SquareBracket.Open, + Token.Puncuation.OpenBracket, Token.Keywords.AttributeSpecifier("assembly"), Token.Puncuation.Colon, Token.Type("Foo"), - Token.Puncuation.SquareBracket.Close]); + Token.Puncuation.CloseBracket]); }); it("Two global attributes in same section with specifier", () => { @@ -40,13 +40,13 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Puncuation.SquareBracket.Open, + Token.Puncuation.OpenBracket, Token.Keywords.AttributeSpecifier("module"), Token.Puncuation.Colon, Token.Type("Foo"), Token.Puncuation.Comma, Token.Type("Bar"), - Token.Puncuation.SquareBracket.Close]); + Token.Puncuation.CloseBracket]); }); it("Two global attributes in same section with specifier and empty argument lists", () => { @@ -55,17 +55,17 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Puncuation.SquareBracket.Open, + Token.Puncuation.OpenBracket, Token.Keywords.AttributeSpecifier("module"), Token.Puncuation.Colon, Token.Type("Foo"), - Token.Puncuation.Parenthesis.Open, - Token.Puncuation.Parenthesis.Close, + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, Token.Puncuation.Comma, Token.Type("Bar"), - Token.Puncuation.Parenthesis.Open, - Token.Puncuation.Parenthesis.Close, - Token.Puncuation.SquareBracket.Close]); + Token.Puncuation.OpenParen, + 
Token.Puncuation.CloseParen, + Token.Puncuation.CloseBracket]); }); it("Global attribute with one argument", () => { @@ -74,12 +74,12 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Puncuation.SquareBracket.Open, + Token.Puncuation.OpenBracket, Token.Type("Foo"), - Token.Puncuation.Parenthesis.Open, + Token.Puncuation.OpenParen, Token.Literals.Boolean.True, - Token.Puncuation.Parenthesis.Close, - Token.Puncuation.SquareBracket.Close]); + Token.Puncuation.CloseParen, + Token.Puncuation.CloseBracket]); }); it("Global attribute with two arguments", () => { @@ -88,14 +88,14 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Puncuation.SquareBracket.Open, + Token.Puncuation.OpenBracket, Token.Type("Foo"), - Token.Puncuation.Parenthesis.Open, + Token.Puncuation.OpenParen, Token.Literals.Boolean.True, Token.Puncuation.Comma, Token.Literals.Numeric.Decimal("42"), - Token.Puncuation.Parenthesis.Close, - Token.Puncuation.SquareBracket.Close]); + Token.Puncuation.CloseParen, + Token.Puncuation.CloseBracket]); }); it("Global attribute with three arguments", () => { @@ -104,9 +104,9 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Puncuation.SquareBracket.Open, + Token.Puncuation.OpenBracket, Token.Type("Foo"), - Token.Puncuation.Parenthesis.Open, + Token.Puncuation.OpenParen, Token.Literals.Boolean.True, Token.Puncuation.Comma, Token.Literals.Numeric.Decimal("42"), @@ -114,8 +114,8 @@ describe("Grammar", () => { Token.Puncuation.String.Begin, Token.Literals.String("text"), Token.Puncuation.String.End, - Token.Puncuation.Parenthesis.Close, - Token.Puncuation.SquareBracket.Close]); + Token.Puncuation.CloseParen, + Token.Puncuation.CloseBracket]); }); it("Global attribute with named argument", () => { @@ -124,14 +124,14 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - 
Token.Puncuation.SquareBracket.Open, + Token.Puncuation.OpenBracket, Token.Type("Foo"), - Token.Puncuation.Parenthesis.Open, + Token.Puncuation.OpenParen, Token.Identifiers.PropertyName("Bar"), Token.Operators.Assignment, Token.Literals.Numeric.Decimal("42"), - Token.Puncuation.Parenthesis.Close, - Token.Puncuation.SquareBracket.Close]); + Token.Puncuation.CloseParen, + Token.Puncuation.CloseBracket]); }); it("Global attribute with one positional argument and one named argument", () => { @@ -140,16 +140,16 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Puncuation.SquareBracket.Open, + Token.Puncuation.OpenBracket, Token.Type("Foo"), - Token.Puncuation.Parenthesis.Open, + Token.Puncuation.OpenParen, Token.Literals.Boolean.True, Token.Puncuation.Comma, Token.Identifiers.PropertyName("Bar"), Token.Operators.Assignment, Token.Literals.Numeric.Decimal("42"), - Token.Puncuation.Parenthesis.Close, - Token.Puncuation.SquareBracket.Close]); + Token.Puncuation.CloseParen, + Token.Puncuation.CloseBracket]); }); it("Global attribute with specifier, one positional argument, and two named arguments", () => { @@ -158,11 +158,11 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Puncuation.SquareBracket.Open, + Token.Puncuation.OpenBracket, Token.Keywords.AttributeSpecifier("module"), Token.Puncuation.Colon, Token.Type("Foo"), - Token.Puncuation.Parenthesis.Open, + Token.Puncuation.OpenParen, Token.Literals.Boolean.True, Token.Puncuation.Comma, Token.Identifiers.PropertyName("Bar"), @@ -174,8 +174,8 @@ describe("Grammar", () => { Token.Puncuation.String.Begin, Token.Literals.String("hello"), Token.Puncuation.String.End, - Token.Puncuation.Parenthesis.Close, - Token.Puncuation.SquareBracket.Close]); + Token.Puncuation.CloseParen, + Token.Puncuation.CloseBracket]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/classes.test.syntax.ts 
b/test/syntaxes/classes.test.syntax.ts index fee241060a..3bfa377a77 100644 --- a/test/syntaxes/classes.test.syntax.ts +++ b/test/syntaxes/classes.test.syntax.ts @@ -37,58 +37,58 @@ public abstract class PublicAbstractClass { } Token.Keywords.Modifiers.Public, Token.Keywords.Class, Token.Identifiers.ClassName("PublicClass"), - Token.Puncuation.CurlyBrace.Open, - Token.Puncuation.CurlyBrace.Close, + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace, Token.Keywords.Class, Token.Identifiers.ClassName("DefaultClass"), - Token.Puncuation.CurlyBrace.Open, - Token.Puncuation.CurlyBrace.Close, + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace, Token.Keywords.Modifiers.Internal, Token.Keywords.Class, Token.Identifiers.ClassName("InternalClass"), - Token.Puncuation.CurlyBrace.Open, - Token.Puncuation.CurlyBrace.Close, + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace, Token.Keywords.Modifiers.Static, Token.Keywords.Class, Token.Identifiers.ClassName("DefaultStaticClass"), - Token.Puncuation.CurlyBrace.Open, - Token.Puncuation.CurlyBrace.Close, + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace, Token.Keywords.Modifiers.Public, Token.Keywords.Modifiers.Static, Token.Keywords.Class, Token.Identifiers.ClassName("PublicStaticClass"), - Token.Puncuation.CurlyBrace.Open, - Token.Puncuation.CurlyBrace.Close, + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace, Token.Keywords.Modifiers.Sealed, Token.Keywords.Class, Token.Identifiers.ClassName("DefaultSealedClass"), - Token.Puncuation.CurlyBrace.Open, - Token.Puncuation.CurlyBrace.Close, + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace, Token.Keywords.Modifiers.Public, Token.Keywords.Modifiers.Sealed, Token.Keywords.Class, Token.Identifiers.ClassName("PublicSealedClass"), - Token.Puncuation.CurlyBrace.Open, - Token.Puncuation.CurlyBrace.Close, + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace, Token.Keywords.Modifiers.Public, Token.Keywords.Modifiers.Abstract, 
Token.Keywords.Class, Token.Identifiers.ClassName("PublicAbstractClass"), - Token.Puncuation.CurlyBrace.Open, - Token.Puncuation.CurlyBrace.Close, + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace, Token.Keywords.Modifiers.Abstract, Token.Keywords.Class, Token.Identifiers.ClassName("DefaultAbstractClass"), - Token.Puncuation.CurlyBrace.Open, - Token.Puncuation.CurlyBrace.Close]); + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace]); }); it("generics in identifier", () => { @@ -99,8 +99,8 @@ public abstract class PublicAbstractClass { } tokens.should.deep.equal([ Token.Keywords.Class, Token.Identifiers.ClassName("Dictionary"), - Token.Puncuation.CurlyBrace.Open, - Token.Puncuation.CurlyBrace.Close]); + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace]); }); it("inheritance", () => { @@ -119,8 +119,8 @@ class PublicClass : Dictionary>, IMap"), @@ -137,8 +137,8 @@ class PublicClass : Dictionary>, IMap"), @@ -166,8 +166,8 @@ class PublicClass : Dictionary>, IMap { @@ -188,8 +188,8 @@ class PublicClass : Dictionary[]>, ISomething Token.Type("T"), Token.Puncuation.Colon, Token.Type("ISomething"), - Token.Puncuation.CurlyBrace.Open, - Token.Puncuation.CurlyBrace.Close, + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace, Token.Keywords.Class, Token.Identifiers.ClassName("PublicClass"), @@ -202,8 +202,8 @@ class PublicClass : Dictionary[]>, ISomething Token.Puncuation.TypeParameters.Begin, Token.Type("string"), Token.Puncuation.TypeParameters.End, - Token.Puncuation.SquareBracket.Open, - Token.Puncuation.SquareBracket.Close, + Token.Puncuation.OpenBracket, + Token.Puncuation.CloseBracket, Token.Puncuation.TypeParameters.End, Token.Puncuation.Comma, Token.Type("ISomething"), @@ -213,14 +213,14 @@ class PublicClass : Dictionary[]>, ISomething Token.Type("ICar"), Token.Puncuation.Comma, Token.Keywords.New, - Token.Puncuation.Parenthesis.Open, - Token.Puncuation.Parenthesis.Close, + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, 
Token.Keywords.Where, Token.Type("X"), Token.Puncuation.Colon, Token.Keywords.Struct, - Token.Puncuation.CurlyBrace.Open, - Token.Puncuation.CurlyBrace.Close]); + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace]); }); it("nested class", () => { @@ -238,15 +238,15 @@ class Klass tokens.should.deep.equal([ Token.Keywords.Class, Token.Identifiers.ClassName("Klass"), - Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.OpenBrace, Token.Keywords.Modifiers.Public, Token.Keywords.Class, Token.Identifiers.ClassName("Nested"), - Token.Puncuation.CurlyBrace.Open, - Token.Puncuation.CurlyBrace.Close, + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace, - Token.Puncuation.CurlyBrace.Close]); + Token.Puncuation.CloseBrace]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/delegates.test.syntax.ts b/test/syntaxes/delegates.test.syntax.ts index 167da76d20..c399dcc004 100644 --- a/test/syntaxes/delegates.test.syntax.ts +++ b/test/syntaxes/delegates.test.syntax.ts @@ -19,8 +19,8 @@ describe("Grammar", () => { Token.Keywords.Delegate, Token.Type("void"), Token.Identifiers.DelegateName("D"), - Token.Puncuation.Parenthesis.Open, - Token.Puncuation.Parenthesis.Close, + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, Token.Puncuation.Semicolon]); }); @@ -33,10 +33,10 @@ describe("Grammar", () => { Token.Keywords.Delegate, Token.Type("TResult"), Token.Identifiers.DelegateName("D"), - Token.Puncuation.Parenthesis.Open, + Token.Puncuation.OpenParen, Token.Type("T"), Token.Variables.Parameter("arg1"), - Token.Puncuation.Parenthesis.Close, + Token.Puncuation.CloseParen, Token.Puncuation.Semicolon]); }); @@ -53,8 +53,8 @@ delegate void D() Token.Keywords.Delegate, Token.Type("void"), Token.Identifiers.DelegateName("D"), - Token.Puncuation.Parenthesis.Open, - Token.Puncuation.Parenthesis.Close, + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, Token.Keywords.Where, Token.Type("T1"), Token.Puncuation.Colon, @@ -71,7 +71,7 @@ delegate 
void D() Token.Keywords.Delegate, Token.Type("int"), Token.Identifiers.DelegateName("D"), - Token.Puncuation.Parenthesis.Open, + Token.Puncuation.OpenParen, Token.Keywords.Modifiers.Ref, Token.Type("string"), Token.Variables.Parameter("x"), @@ -82,10 +82,10 @@ delegate void D() Token.Puncuation.Comma, Token.Keywords.Modifiers.Params, Token.Type("object"), - Token.Puncuation.SquareBracket.Open, - Token.Puncuation.SquareBracket.Close, + Token.Puncuation.OpenBracket, + Token.Puncuation.CloseBracket, Token.Variables.Parameter("z"), - Token.Puncuation.Parenthesis.Close, + Token.Puncuation.CloseParen, Token.Puncuation.Semicolon]); }); }); diff --git a/test/syntaxes/enums.test.syntax.ts b/test/syntaxes/enums.test.syntax.ts index 15dc8c7278..df885b04f8 100644 --- a/test/syntaxes/enums.test.syntax.ts +++ b/test/syntaxes/enums.test.syntax.ts @@ -18,8 +18,8 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Enum, Token.Identifiers.EnumName("E"), - Token.Puncuation.CurlyBrace.Open, - Token.Puncuation.CurlyBrace.Close]); + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace]); }); it("enum with base type", () => { @@ -32,8 +32,8 @@ describe("Grammar", () => { Token.Identifiers.EnumName("E"), Token.Puncuation.Colon, Token.Type("byte"), - Token.Puncuation.CurlyBrace.Open, - Token.Puncuation.CurlyBrace.Close]); + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace]); }); it("enum with single member", () => { @@ -44,9 +44,9 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Enum, Token.Identifiers.EnumName("E"), - Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.OpenBrace, Token.Variables.EnumMember("M1"), - Token.Puncuation.CurlyBrace.Close]); + Token.Puncuation.CloseBrace]); }); it("enum with multiple members", () => { @@ -57,13 +57,13 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Enum, Token.Identifiers.EnumName("Color"), - Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.OpenBrace, 
Token.Variables.EnumMember("Red"), Token.Puncuation.Comma, Token.Variables.EnumMember("Green"), Token.Puncuation.Comma, Token.Variables.EnumMember("Blue"), - Token.Puncuation.CurlyBrace.Close]); + Token.Puncuation.CloseBrace]); }); it("enum with initialized member", () => { @@ -82,7 +82,7 @@ enum E tokens.should.deep.equal([ Token.Keywords.Enum, Token.Identifiers.EnumName("E"), - Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.OpenBrace, Token.Variables.EnumMember("Value1"), Token.Operators.Assignment, Token.Literals.Numeric.Decimal("1"), @@ -90,7 +90,7 @@ enum E Token.Variables.EnumMember("Value2"), Token.Puncuation.Comma, Token.Variables.EnumMember("Value3"), - Token.Puncuation.CurlyBrace.Close]); + Token.Puncuation.CloseBrace]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/events.test.syntax.ts b/test/syntaxes/events.test.syntax.ts index 2e3989be23..a9ffe0965d 100644 --- a/test/syntaxes/events.test.syntax.ts +++ b/test/syntaxes/events.test.syntax.ts @@ -94,14 +94,14 @@ public event Type Event Token.Keywords.Event, Token.Type("Type"), Token.Identifiers.EventName("Event"), - Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.OpenBrace, Token.Keywords.Add, - Token.Puncuation.CurlyBrace.Open, - Token.Puncuation.CurlyBrace.Close, + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace, Token.Keywords.Remove, - Token.Puncuation.CurlyBrace.Open, - Token.Puncuation.CurlyBrace.Close, - Token.Puncuation.CurlyBrace.Close]); + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace, + Token.Puncuation.CloseBrace]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/fields.test.syntax.ts b/test/syntaxes/fields.test.syntax.ts index 3aefb1d7d4..277aa0d374 100644 --- a/test/syntaxes/fields.test.syntax.ts +++ b/test/syntaxes/fields.test.syntax.ts @@ -103,8 +103,8 @@ string[] field123;`); Token.Puncuation.Semicolon, Token.Type("string"), - Token.Puncuation.SquareBracket.Open, - Token.Puncuation.SquareBracket.Close, + 
Token.Puncuation.OpenBracket, + Token.Puncuation.CloseBracket, Token.Identifiers.FieldName("field123"), Token.Puncuation.Semicolon]); }); diff --git a/test/syntaxes/indexers.test.syntax.ts b/test/syntaxes/indexers.test.syntax.ts index afc07b30f1..b3a4b987c2 100644 --- a/test/syntaxes/indexers.test.syntax.ts +++ b/test/syntaxes/indexers.test.syntax.ts @@ -24,21 +24,21 @@ public string this[int index] Token.Keywords.Modifiers.Public, Token.Type("string"), Token.Keywords.This, - Token.Puncuation.SquareBracket.Open, + Token.Puncuation.OpenBracket, Token.Type("int"), Token.Variables.Parameter("index"), - Token.Puncuation.SquareBracket.Close, - Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.CloseBracket, + Token.Puncuation.OpenBrace, Token.Keywords.Get, - Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.OpenBrace, Token.Keywords.Return, Token.Variables.ReadWrite("index"), Token.Variables.ReadWrite("ToString"), - Token.Puncuation.Parenthesis.Open, - Token.Puncuation.Parenthesis.Close, + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, Token.Puncuation.Semicolon, - Token.Puncuation.CurlyBrace.Close, - Token.Puncuation.CurlyBrace.Close]); + Token.Puncuation.CloseBrace, + Token.Puncuation.CloseBrace]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/interfaces.test.syntax.ts b/test/syntaxes/interfaces.test.syntax.ts index 7a9aca3ed8..7a4f776af2 100644 --- a/test/syntaxes/interfaces.test.syntax.ts +++ b/test/syntaxes/interfaces.test.syntax.ts @@ -18,8 +18,8 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Interface, Token.Identifiers.InterfaceName("IFoo"), - Token.Puncuation.CurlyBrace.Open, - Token.Puncuation.CurlyBrace.Close]); + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace]); }); it("interface inheritance", () => { @@ -34,14 +34,14 @@ interface IBar : IFoo { } tokens.should.deep.equal([ Token.Keywords.Interface, Token.Identifiers.InterfaceName("IFoo"), - Token.Puncuation.CurlyBrace.Open, - 
Token.Puncuation.CurlyBrace.Close, + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace, Token.Keywords.Interface, Token.Identifiers.InterfaceName("IBar"), Token.Puncuation.Colon, Token.Type("IFoo"), - Token.Puncuation.CurlyBrace.Open, - Token.Puncuation.CurlyBrace.Close]); + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace]); }); it("generic interface", () => { @@ -52,8 +52,8 @@ interface IBar : IFoo { } tokens.should.deep.equal([ Token.Keywords.Interface, Token.Identifiers.InterfaceName("IFoo"), - Token.Puncuation.CurlyBrace.Open, - Token.Puncuation.CurlyBrace.Close]); + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace]); }); it("generic interface with variance", () => { @@ -64,8 +64,8 @@ interface IBar : IFoo { } tokens.should.deep.equal([ Token.Keywords.Interface, Token.Identifiers.InterfaceName("IFoo"), - Token.Puncuation.CurlyBrace.Open, - Token.Puncuation.CurlyBrace.Close]); + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace]); }); it("generic interface with constraints", () => { @@ -80,8 +80,8 @@ interface IBar : IFoo { } Token.Type("T1"), Token.Puncuation.Colon, Token.Type("T2"), - Token.Puncuation.CurlyBrace.Open, - Token.Puncuation.CurlyBrace.Close]); + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/iteration-statements.test.syntax.ts b/test/syntaxes/iteration-statements.test.syntax.ts index 6ae4accb77..91b9805add 100644 --- a/test/syntaxes/iteration-statements.test.syntax.ts +++ b/test/syntaxes/iteration-statements.test.syntax.ts @@ -16,11 +16,11 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.While, - Token.Puncuation.Parenthesis.Open, + Token.Puncuation.OpenParen, Token.Literals.Boolean.True, - Token.Puncuation.Parenthesis.Close, - Token.Puncuation.CurlyBrace.Open, - Token.Puncuation.CurlyBrace.Close + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace ]); }); @@ -31,12 +31,12 
@@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Do, - Token.Puncuation.CurlyBrace.Open, - Token.Puncuation.CurlyBrace.Close, + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace, Token.Keywords.While, - Token.Puncuation.Parenthesis.Open, + Token.Puncuation.OpenParen, Token.Literals.Boolean.True, - Token.Puncuation.Parenthesis.Close, + Token.Puncuation.CloseParen, Token.Puncuation.Semicolon ]); }); diff --git a/test/syntaxes/methods.test.syntax.ts b/test/syntaxes/methods.test.syntax.ts index c3ad030e91..a3356a5116 100644 --- a/test/syntaxes/methods.test.syntax.ts +++ b/test/syntaxes/methods.test.syntax.ts @@ -18,10 +18,10 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Type("void"), Token.Identifiers.MethodName("Foo"), - Token.Puncuation.Parenthesis.Open, - Token.Puncuation.Parenthesis.Close, - Token.Puncuation.CurlyBrace.Open, - Token.Puncuation.CurlyBrace.Close]); + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace]); }); it("declaration with two parameters", () => { @@ -36,20 +36,20 @@ int Add(int x, int y) tokens.should.deep.equal([ Token.Type("int"), Token.Identifiers.MethodName("Add"), - Token.Puncuation.Parenthesis.Open, + Token.Puncuation.OpenParen, Token.Type("int"), Token.Variables.Parameter("x"), Token.Puncuation.Comma, Token.Type("int"), Token.Variables.Parameter("y"), - Token.Puncuation.Parenthesis.Close, - Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, Token.Keywords.Return, Token.Variables.ReadWrite("x"), Token.Operators.Arithmetic.Addition, Token.Variables.ReadWrite("y"), Token.Puncuation.Semicolon, - Token.Puncuation.CurlyBrace.Close]); + Token.Puncuation.CloseBrace]); }); it("expression body", () => { @@ -60,13 +60,13 @@ int Add(int x, int y) tokens.should.deep.equal([ Token.Type("int"), Token.Identifiers.MethodName("Add"), - Token.Puncuation.Parenthesis.Open, + 
Token.Puncuation.OpenParen, Token.Type("int"), Token.Variables.Parameter("x"), Token.Puncuation.Comma, Token.Type("int"), Token.Variables.Parameter("y"), - Token.Puncuation.Parenthesis.Close, + Token.Puncuation.CloseParen, Token.Operators.Arrow, Token.Variables.ReadWrite("x"), Token.Operators.Arithmetic.Addition, diff --git a/test/syntaxes/namespaces.test.syntax.ts b/test/syntaxes/namespaces.test.syntax.ts index 376658ab20..6f306dca3e 100644 --- a/test/syntaxes/namespaces.test.syntax.ts +++ b/test/syntaxes/namespaces.test.syntax.ts @@ -21,8 +21,8 @@ namespace TestNamespace tokens.should.deep.equal([ Token.Keywords.Namespace, Token.Identifiers.NamespaceName("TestNamespace"), - Token.Puncuation.CurlyBrace.Open, - Token.Puncuation.CurlyBrace.Close]); + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace]); }); it("has a namespace keyword and a dotted name", () => { @@ -38,8 +38,8 @@ namespace Test.Namespace Token.Identifiers.NamespaceName("Test"), Token.Puncuation.Accessor, Token.Identifiers.NamespaceName("Namespace"), - Token.Puncuation.CurlyBrace.Open, - Token.Puncuation.CurlyBrace.Close]); + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace]); }); it("can be nested", () => { @@ -56,14 +56,14 @@ namespace TestNamespace tokens.should.deep.equal([ Token.Keywords.Namespace, Token.Identifiers.NamespaceName("TestNamespace"), - Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.OpenBrace, Token.Keywords.Namespace, Token.Identifiers.NamespaceName("NestedNamespace"), - Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.OpenBrace, - Token.Puncuation.CurlyBrace.Close, - Token.Puncuation.CurlyBrace.Close]); + Token.Puncuation.CloseBrace, + Token.Puncuation.CloseBrace]); }); it("can contain using statements", () => { @@ -100,7 +100,7 @@ namespace TestNamespace Token.Keywords.Namespace, Token.Identifiers.NamespaceName("TestNamespace"), - Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.OpenBrace, Token.Keywords.Using, 
Token.Identifiers.NamespaceName("UsingTwo"), @@ -116,7 +116,7 @@ namespace TestNamespace Token.Keywords.Namespace, Token.Identifiers.NamespaceName("NestedNamespace"), - Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.OpenBrace, Token.Keywords.Using, Token.Identifiers.NamespaceName("UsingThree"), @@ -130,8 +130,8 @@ namespace TestNamespace Token.Type("Something"), Token.Puncuation.Semicolon, - Token.Puncuation.CurlyBrace.Close, - Token.Puncuation.CurlyBrace.Close]); + Token.Puncuation.CloseBrace, + Token.Puncuation.CloseBrace]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/properties.test.syntax.ts b/test/syntaxes/properties.test.syntax.ts index 95cb991a27..b4fb791622 100644 --- a/test/syntaxes/properties.test.syntax.ts +++ b/test/syntaxes/properties.test.syntax.ts @@ -24,21 +24,21 @@ public IBooom Property Token.Keywords.Modifiers.Public, Token.Type("IBooom"), Token.Identifiers.PropertyName("Property"), - Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.OpenBrace, Token.Keywords.Get, - Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.OpenBrace, Token.Keywords.Return, Token.Literals.Null, Token.Puncuation.Semicolon, - Token.Puncuation.CurlyBrace.Close, + Token.Puncuation.CloseBrace, Token.Keywords.Set, - Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.OpenBrace, Token.Variables.ReadWrite("something"), Token.Operators.Assignment, Token.Variables.ReadWrite("value"), Token.Puncuation.Semicolon, - Token.Puncuation.CurlyBrace.Close, - Token.Puncuation.CurlyBrace.Close]); + Token.Puncuation.CloseBrace, + Token.Puncuation.CloseBrace]); }); it("declaration single line", () => { @@ -50,22 +50,22 @@ public IBooom Property Token.Keywords.Modifiers.Public, Token.Type("IBooom"), Token.Identifiers.PropertyName("Property"), - Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.OpenBrace, Token.Keywords.Get, - Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.OpenBrace, Token.Keywords.Return, Token.Literals.Null, 
Token.Puncuation.Semicolon, - Token.Puncuation.CurlyBrace.Close, + Token.Puncuation.CloseBrace, Token.Keywords.Modifiers.Private, Token.Keywords.Set, - Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.OpenBrace, Token.Variables.ReadWrite("something"), Token.Operators.Assignment, Token.Variables.ReadWrite("value"), Token.Puncuation.Semicolon, - Token.Puncuation.CurlyBrace.Close, - Token.Puncuation.CurlyBrace.Close]); + Token.Puncuation.CloseBrace, + Token.Puncuation.CloseBrace]); }); it("declaration without modifiers", () => { @@ -76,12 +76,12 @@ public IBooom Property tokens.should.deep.equal([ Token.Type("IBooom"), Token.Identifiers.PropertyName("Property"), - Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.OpenBrace, Token.Keywords.Get, Token.Puncuation.Semicolon, Token.Keywords.Set, Token.Puncuation.Semicolon, - Token.Puncuation.CurlyBrace.Close]); + Token.Puncuation.CloseBrace]); }); it("auto-property single line", function () { @@ -93,12 +93,12 @@ public IBooom Property Token.Keywords.Modifiers.Public, Token.Type("IBooom"), Token.Identifiers.PropertyName("Property"), - Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.OpenBrace, Token.Keywords.Get, Token.Puncuation.Semicolon, Token.Keywords.Set, Token.Puncuation.Semicolon, - Token.Puncuation.CurlyBrace.Close]); + Token.Puncuation.CloseBrace]); }); it("auto-property single line (protected internal)", function () { @@ -111,12 +111,12 @@ public IBooom Property Token.Keywords.Modifiers.Internal, Token.Type("IBooom"), Token.Identifiers.PropertyName("Property"), - Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.OpenBrace, Token.Keywords.Get, Token.Puncuation.Semicolon, Token.Keywords.Set, Token.Puncuation.Semicolon, - Token.Puncuation.CurlyBrace.Close]); + Token.Puncuation.CloseBrace]); }); it("auto-property", () => { @@ -133,12 +133,12 @@ public IBooom Property Token.Keywords.Modifiers.Public, Token.Type("IBooom"), Token.Identifiers.PropertyName("Property"), - Token.Puncuation.CurlyBrace.Open, + 
Token.Puncuation.OpenBrace, Token.Keywords.Get, Token.Puncuation.Semicolon, Token.Keywords.Set, Token.Puncuation.Semicolon, - Token.Puncuation.CurlyBrace.Close]); + Token.Puncuation.CloseBrace]); }); it("generic auto-property", () => { @@ -156,16 +156,16 @@ public IBooom Property Token.Puncuation.TypeParameters.Begin, Token.Type("T"), Token.Puncuation.TypeParameters.End, - Token.Puncuation.SquareBracket.Open, - Token.Puncuation.SquareBracket.Close, + Token.Puncuation.OpenBracket, + Token.Puncuation.CloseBracket, Token.Puncuation.TypeParameters.End, Token.Identifiers.PropertyName("Property"), - Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.OpenBrace, Token.Keywords.Get, Token.Puncuation.Semicolon, Token.Keywords.Set, Token.Puncuation.Semicolon, - Token.Puncuation.CurlyBrace.Close]); + Token.Puncuation.CloseBrace]); }); it("auto-property initializer", () => { @@ -183,14 +183,14 @@ public IBooom Property Token.Puncuation.TypeParameters.Begin, Token.Type("T"), Token.Puncuation.TypeParameters.End, - Token.Puncuation.SquareBracket.Open, - Token.Puncuation.SquareBracket.Close, + Token.Puncuation.OpenBracket, + Token.Puncuation.CloseBracket, Token.Puncuation.TypeParameters.End, Token.Identifiers.PropertyName("Property"), - Token.Puncuation.CurlyBrace.Open, + Token.Puncuation.OpenBrace, Token.Keywords.Get, Token.Puncuation.Semicolon, - Token.Puncuation.CurlyBrace.Close, + Token.Puncuation.CloseBrace, Token.Operators.Assignment, Token.Keywords.New, Token.Type("Dictionary"), @@ -201,11 +201,11 @@ public IBooom Property Token.Puncuation.TypeParameters.Begin, Token.Type("T"), Token.Puncuation.TypeParameters.End, - Token.Puncuation.SquareBracket.Open, - Token.Puncuation.SquareBracket.Close, + Token.Puncuation.OpenBracket, + Token.Puncuation.CloseBracket, Token.Puncuation.TypeParameters.End, - Token.Puncuation.Parenthesis.Open, - Token.Puncuation.Parenthesis.Close, + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, Token.Puncuation.Semicolon]); }); diff --git 
a/test/syntaxes/structs.test.syntax.ts b/test/syntaxes/structs.test.syntax.ts index f64235b736..8495ca7042 100644 --- a/test/syntaxes/structs.test.syntax.ts +++ b/test/syntaxes/structs.test.syntax.ts @@ -18,8 +18,8 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Struct, Token.Identifiers.StructName("S"), - Token.Puncuation.CurlyBrace.Open, - Token.Puncuation.CurlyBrace.Close]); + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace]); }); it("struct interface implementation", () => { @@ -33,14 +33,14 @@ struct S : IFoo { } tokens.should.deep.equal([ Token.Keywords.Interface, Token.Identifiers.InterfaceName("IFoo"), - Token.Puncuation.CurlyBrace.Open, - Token.Puncuation.CurlyBrace.Close, + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace, Token.Keywords.Struct, Token.Identifiers.StructName("S"), Token.Puncuation.Colon, Token.Type("IFoo"), - Token.Puncuation.CurlyBrace.Open, - Token.Puncuation.CurlyBrace.Close]); + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace]); }); it("generic struct", () => { @@ -53,8 +53,8 @@ struct S { } tokens.should.deep.equal([ Token.Keywords.Struct, Token.Identifiers.StructName("S"), - Token.Puncuation.CurlyBrace.Open, - Token.Puncuation.CurlyBrace.Close]); + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace]); }); it("generic struct with constraints", () => { @@ -71,8 +71,8 @@ struct S where T1 : T2 { } Token.Type("T1"), Token.Puncuation.Colon, Token.Type("T2"), - Token.Puncuation.CurlyBrace.Open, - Token.Puncuation.CurlyBrace.Close]); + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/utils/tokenize.ts b/test/syntaxes/utils/tokenize.ts index 86104730a7..87089d01ac 100644 --- a/test/syntaxes/utils/tokenize.ts +++ b/test/syntaxes/utils/tokenize.ts @@ -223,13 +223,14 @@ export namespace Token { export namespace Puncuation { export const Accessor = createToken('.', 'punctuation.accessor.cs'); + export const 
CloseBrace = createToken('}', 'punctuation.curlybrace.close.cs'); + export const CloseBracket = createToken(']', 'punctuation.squarebracket.close.cs'); + export const CloseParen = createToken(')', 'punctuation.parenthesis.close.cs'); export const Colon = createToken(':', 'punctuation.separator.colon.cs'); export const Comma = createToken(',', 'punctuation.separator.comma.cs'); - - export namespace CurlyBrace { - export const Close = createToken('}', 'punctuation.curlybrace.close.cs'); - export const Open = createToken('{', 'punctuation.curlybrace.open.cs'); - } + export const OpenBrace = createToken('{', 'punctuation.curlybrace.open.cs'); + export const OpenBracket = createToken('[', 'punctuation.squarebracket.open.cs'); + export const OpenParen = createToken('(', 'punctuation.parenthesis.open.cs'); export namespace Interpolation { export const Begin = createToken('{', 'punctuation.definition.interpolation.begin.cs'); @@ -242,18 +243,8 @@ export namespace Token { export const VerbatimBegin = createToken('$@"', 'punctuation.definition.string.begin.cs'); } - export namespace Parenthesis { - export const Close = createToken(')', 'punctuation.parenthesis.close.cs'); - export const Open = createToken('(', 'punctuation.parenthesis.open.cs'); - } - export const Semicolon = createToken(';', 'punctuation.terminator.statement.cs'); - export namespace SquareBracket { - export const Close = createToken(']', 'punctuation.squarebracket.close.cs'); - export const Open = createToken('[', 'punctuation.squarebracket.open.cs'); - } - export namespace String { export const Begin = createToken('"', 'punctuation.definition.string.begin.cs'); export const End = createToken('"', 'punctuation.definition.string.end.cs'); From 90bd7173da816780d8633b1a634db344fa029dfd Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Sat, 31 Dec 2016 08:12:39 -0800 Subject: [PATCH 046/192] Add tuple types --- syntaxes/csharp-new.json | 122 ++++++++++++++++++---------- syntaxes/syntax.md | 39 +++++---- 
test/syntaxes/fields.test.syntax.ts | 66 +++++++++++++++ test/syntaxes/utils/tokenize.ts | 1 + 4 files changed, 169 insertions(+), 59 deletions(-) diff --git a/syntaxes/csharp-new.json b/syntaxes/csharp-new.json index caf866fefa..c7d6799c71 100644 --- a/syntaxes/csharp-new.json +++ b/syntaxes/csharp-new.json @@ -444,38 +444,41 @@ ] }, "delegate-declaration": { - "begin": "(?=(?:((new|public|protected|internal|private)\\s+)*)(?:delegate)\\s+)", + "begin": "(?=(?(?:(?:new|public|protected|internal|private)\\s+)*)(?(?:\\b(?:delegate)))\\s+(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*<\\s*(?:(?:(?:in|out)\\s+)?[_$[:alpha:]][_$[:alnum:]]*)(?:,\\s*(?:(?:in|out)\\s+)?[_$[:alpha:]][_$[:alnum:]]*)*\\s*>\\s*)?))\\s*(?:\\())", "end": "(?=;)", "patterns": [ { "include": "#comment" }, { - "name": "storage.modifier.cs", - "match": "\\b(new|public|protected|internal|private)\\b" - }, - { - "begin": "\\b(delegate)\\b\\s+", - "beginCaptures": { + "match": "(?(?:(?:new|public|protected|internal|private)\\s+)*)(?(?:\\b(?:delegate)))\\s+(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*<\\s*(?:(?:(?:in|out)\\s+)?[_$[:alpha:]][_$[:alnum:]]*)(?:,\\s*(?:(?:in|out)\\s+)?[_$[:alpha:]][_$[:alnum:]]*)*\\s*>\\s*)?))\\s*(?=\\()", + "captures": { "1": { + "patterns": [ + { + "match": "\\b(new|public|protected|internal|private)\\b", + "captures": { + "1": { + "name": 
"storage.modifier.cs" + } + } + } + ] + }, + "2": { "name": "keyword.other.delegate.cs" - } - }, - "end": "(?=\\()", - "patterns": [ - { - "comment": "C# grammar: identifier variant-type-parameter-list[opt] (", - "match": "\\s+([_$[:alpha:]][_$[:alnum:]]*(\\s*<\\s*(?:((in|out)\\s+)?[_$[:alpha:]][_$[:alnum:]]*\\s*,\\s*)*(?:((in|out)\\s+)?[_$[:alpha:]][_$[:alnum:]]*)\\s*>)?)\\s*(?=\\()", - "captures": { - "1": { - "name": "entity.name.type.delegate.cs" + }, + "3": { + "patterns": [ + { + "include": "#type" } - } + ] }, - { - "include": "#type" + "4": { + "name": "entity.name.type.delegate.cs" } - ] + } }, { "include": "#parenthesized-parameter-list" @@ -761,14 +764,14 @@ ] }, "field-declaration": { - "begin": "(?=\\b(?(?:(?:new|public|protected|internal|private|static|readonly|volatile|const)\\s+)*)\\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)\\s*(?!=>)(?:;|=))", + "begin": "(?=(?(?:(?:new|public|protected|internal|private|static|readonly|volatile|const)\\s+)*)\\s*(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)\\s*(?!=>)(?:;|=))", "end": "(?=;)", "patterns": [ { "include": "#comment" }, { - "begin": "\\b(?(?:(?:new|public|protected|internal|private|static|readonly|volatile|const)\\s+)*)\\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)\\s*(?!=>)(?=;|=)", + "begin": 
"(?(?:\\b(?:new|public|protected|internal|private|static|readonly|volatile|const)\\s+)*)\\s*(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)\\s*(?!=>)(?=;|=)", "beginCaptures": { "1": { "patterns": [ @@ -813,14 +816,14 @@ ] }, "property-declaration": { - "begin": "(?!.*\\b(?:class|interface|struct|enum|event)\\b)(?=\\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\s+)*)\\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)\\s*(?:\\{|=>|$))", + "begin": "(?!.*\\b(?:class|interface|struct|enum|event)\\b)(?=(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\s+)*)\\s*(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)\\s*(?:\\{|=>|$))", "end": "(?=\\}|;)", "patterns": [ { "include": "#comment" }, { - "match": "\\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\s+)*)\\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)\\s*(?=\\{|=>|$)", + "match": 
"(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\s+)*)\\s*(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)\\s*(?=\\{|=>|$)", "captures": { "1": { "patterns": [ @@ -858,14 +861,14 @@ ] }, "indexer-declaration": { - "begin": "(?=\\b(?(?:(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\\s+)*)\\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)?)\\s+(?this)\\s*(?:\\[))", + "begin": "(?=(?(?:(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\\s+)*)\\s*(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?this)\\s*(?:\\[))", "end": "(?=\\}|;)", "patterns": [ { "include": "#comment" }, { - "match": "\\b(?(?:(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\\s+)*)\\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)?)\\s+(?this)\\s*(?=\\[)", + "match": 
"(?(?:(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\\s+)*)\\s*(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?this)\\s*(?=\\[)", "captures": { "1": { "patterns": [ @@ -912,14 +915,14 @@ ] }, "event-declaration": { - "begin": "(?=\\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\s+)*)\\s*\\b(?event)\\b\\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*(?:\\s*,\\s*[_$[:alpha:]][_$[:alnum:]]*)*)\\s*(?:\\{|;|$))", + "begin": "(?=(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\s+)*)\\s*\\b(?event)\\b\\s*(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*(?:\\s*,\\s*[_$[:alpha:]][_$[:alnum:]]*)*)\\s*(?:\\{|;|$))", "end": "(?=\\}|;)", "patterns": [ { "include": "#comment" }, { - "match": "\\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\s+)*)\\s*\\b(?event)\\b\\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*(?:\\s*,\\s*[_$[:alpha:]][_$[:alnum:]]*)*)\\s*(?=\\{|;|$)", + "match": 
"(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\s+)*)\\s*\\b(?event)\\b\\s*(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*(?:\\s*,\\s*[_$[:alpha:]][_$[:alnum:]]*)*)\\s*(?=\\{|;|$)", "captures": { "1": { "patterns": [ @@ -1029,14 +1032,14 @@ ] }, "method-declaration": { - "begin": "(?=\\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\\s+)*)\\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)(?:\\s*<\\s*\\g(?:\\s*,\\s*\\g)*\\s*>\\s*)?\\s*(?:\\())", + "begin": "(?=(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\\s+)*)\\s*(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)(?:\\s*<\\s*\\g(?:\\s*,\\s*\\g)*\\s*>\\s*)?\\s*(?:\\())", "end": "(?=\\}|;)", "patterns": [ { "include": "#comment" }, { - "match": 
"\\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\\s+)*)\\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)(?:\\s*<\\s*\\g(?:\\s*,\\s*\\g)*\\s*>\\s*)?\\s*(?=\\()", + "match": "(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\\s+)*)\\s*(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)(?:\\s*<\\s*\\g(?:\\s*,\\s*\\g)*\\s*>\\s*)?\\s*(?=\\()", "captures": { "1": { "patterns": [ @@ -1063,13 +1066,7 @@ } }, { - "begin": "(?=\\()", - "end": "(?=\\))", - "patterns": [ - { - "include": "#parenthesized-parameter-list" - } - ] + "include": "#parenthesized-parameter-list" }, { "include": "#expression-body" @@ -1411,15 +1408,15 @@ ] }, "parenthesized-parameter-list": { - "begin": "(?=(\\())", + "begin": "(\\()", "beginCaptures": { - "1": { + "0": { "name": "punctuation.parenthesis.open.cs" } }, - "end": "(?=(\\)))", + "end": "(\\))", "endCaptures": { - "1": { + "0": { "name": "punctuation.parenthesis.close.cs" } }, @@ -1460,6 +1457,9 @@ { "include": "#comment" }, + { + "include": "#tuple-type" + }, { "include": "#type-builtin" }, @@ -1474,6 +1474,44 @@ } ] }, + "tuple-type": { + "patterns": [ + { + "begin": "\\(", + "beginCaptures": { + "0": { + "name": "punctuation.parenthesis.open.cs" + } + }, + "end": "\\)", + "endCaptures": { + "0": { + "name": "punctuation.parenthesis.close.cs" + } + }, + "patterns": [ + { + "match": 
"(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)(?:\\s+(?[_$[:alpha:]][_$[:alnum:]]*))?", + "captures": { + "1":{ + "patterns": [ + { + "include": "#type" + } + ] + }, + "2": { + "name": "entity.name.variable.tuple.cs" + } + } + }, + { + "include": "#punctuation-comma" + } + ] + } + ] + }, "type-builtin": { "patterns": [ { diff --git a/syntaxes/syntax.md b/syntaxes/syntax.md index 4c14cc419f..814e76cc36 100644 --- a/syntaxes/syntax.md +++ b/syntaxes/syntax.md @@ -44,28 +44,33 @@ #### Type name -* Expression: `(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)?)` -* Matches: `System.Collections.Generic.Dictionary, System.List>>` +* Expression: `(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)` +* Matches: `System.Collections.Generic.Dictionary, System.List>>` + +#### Delegate declarations + +* Expression: `(?=(?(?:(?:new|public|protected|internal|private)\s+)*)(?(?:\b(?:delegate)))\s+(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)\s+(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*<\s*(?:(?:(?:in|out)\s+)?[_$[:alpha:]][_$[:alnum:]]*)(?:,\s*(?:(?:in|out)\s+)?[_$[:alpha:]][_$[:alnum:]]*)*\s*>\s*)?))\s*(?:\())` +* Matches: `delegate (int, int) 
Foo();` #### Field declaratiosn Note that fields can have multiple declarators with initializers. Our strategy is to match up to the end of the field name. Further field names are matched by looking for identifiers, #punctuation-comma, and #variable-initializer. -* Expression: `(?=\b(?(?:(?:new|public|protected|internal|private|static|readonly|volatile|const)\s+)*)\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)?)\s+(?[_$[:alpha:]][_$[:alnum:]]*)\s*(?!=>)(?:;|=))` +* Expression: `(?=(?(?:(?:new|public|protected|internal|private|static|readonly|volatile|const)\s+)*)\s*(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)\s+(?[_$[:alpha:]][_$[:alnum:]]*)\s*(?!=>)(?:;|=))` * Break down: - * Storage modifiers: `\b(?(?:(?:new|public|protected|internal|private|static|readonly|volatile|const)\s+)*)` - * Type name: `\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)?)` + * Storage modifiers: `(?(?:(?:new|public|protected|internal|private|static|readonly|volatile|const)\s+)*)` + * Type name: `\s*(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)` * First field name: `\s+(?[_$[:alpha:]][_$[:alnum:]]*)*)` * End: `\s*(?!=>)(?:;|=)` #### Event declarations -* Expression: 
`(?=\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\s+)*)\s*\b(?event)\b\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)?)\s+(?[_$[:alpha:]][_$[:alnum:]]*(?:\s*,\s*[_$[:alpha:]][_$[:alnum:]]*)*)\s*(?:\{|;|$))` +* Expression: `(?=(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\s+)*)\s*\b(?event)\b\s*(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)\s+(?[_$[:alpha:]][_$[:alnum:]]*(?:\s*,\s*[_$[:alpha:]][_$[:alnum:]]*)*)\s*(?:\{|;|$))` * Break down: - * Storage modifiers: `\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\s+)*)` + * Storage modifiers: `(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\s+)*)` * Event keyword: `\s*\b(?event)\b` - * Type name: `\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)?)` + * Type name: `\s*(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)` * Event name(s): `\s+(?[_$[:alpha:]][_$[:alnum:]]*(?:\s*,\s*[_$[:alpha:]][_$[:alnum:]]*)*)` * End: `\s*(?=\{|;|$)` @@ -74,28 +79,28 @@ Further field names are matched by looking for identifiers, #punctuation-comma, Note that properties can easily match other declarations unintentially. 
For example, "public class C {" looks a lot like the start of a property if you consider that regular expressions don't know that "class" is a keyword. To handle this situation, we must use look ahead. -* Expression: `(?!.*\b(?:class|interface|struct|enum|event)\b)(?=\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\s+)*)\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)?)\s+(?[_$[:alpha:]][_$[:alnum:]]*)\s*(?:\{|=>|$))` +* Expression: `(?!.*\b(?:class|interface|struct|enum|event)\b)(?=(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\s+)*)\s*(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)\s+(?[_$[:alpha:]][_$[:alnum:]]*)\s*(?:\{|=>|$))` * Break down: * Don't match other declarations! 
`(?!.*\b(?:class|interface|struct|enum|event)\b)` - * Storage modifiers: `\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\s+)*)` - * Type name: `\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)?)` + * Storage modifiers: `(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\s+)*)` + * Type name: `\s*(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)` * Property name: `\s+(?[_$[:alpha:]][_$[:alnum:]]*)` * End: `\s*(?:\{|=>|$))` #### Indexer declarations -* Expression: `(?=\b(?(?:(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\s+)*)\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)?)\s+(?this)\s*(?:\[))` +* Expression: `(?=(?(?:(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\s+)*)\s*(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)\s+(?this)\s*(?:\[))` * Break down: - * Storage modifiers: `\b(?(?:(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\s+)*)` - * Type name: `\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)?)` + * Storage modifiers: 
`(?(?:(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\s+)*)` + * Type name: `\s*(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)` * Property name: `\s+(?this)` * End: `\s*(?:\[))` #### Method declarations -* Expression: `(?=\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\s+)*)\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)?)\s+(?[_$[:alpha:]][_$[:alnum:]]*)(?:\s*<\s*\g(?:\s*,\s*\g)*\s*>\s*)?\s*(?:\())` +* Expression: `(?=(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\s+)*)\s*(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)\s+(?[_$[:alpha:]][_$[:alnum:]]*)(?:\s*<\s*\g(?:\s*,\s*\g)*\s*>\s*)?\s*(?:\())` * Break down: - * Storage modifiers: `\b(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\s+)*)` - * Type name: `\s*(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)?)` + * Storage modifiers: `(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\s+)*)` + * Type name: 
`\s*(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)` * Method name and type parameters: `\s+(?[_$[:alpha:]][_$[:alnum:]]*)(?:\s*<\s*\g(?:\s*,\s*\g)*\s*>\s*)?` * End: `\s*(?:\[))` \ No newline at end of file diff --git a/test/syntaxes/fields.test.syntax.ts b/test/syntaxes/fields.test.syntax.ts index 277aa0d374..737f810a53 100644 --- a/test/syntaxes/fields.test.syntax.ts +++ b/test/syntaxes/fields.test.syntax.ts @@ -155,6 +155,72 @@ const bool field = true;`); Token.Literals.Numeric.Decimal("42"), Token.Puncuation.Semicolon]); }); + + it("tuple type with no names and no modifiers", () => { + + const input = Input.InClass(`(int, int) x;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Puncuation.OpenParen, + Token.Type("int"), + Token.Puncuation.Comma, + Token.Type("int"), + Token.Puncuation.CloseParen, + Token.Identifiers.FieldName("x"), + Token.Puncuation.Semicolon]); + }); + + it("tuple type with no names and private modifier", () => { + + const input = Input.InClass(`private (int, int) x;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Private, + Token.Puncuation.OpenParen, + Token.Type("int"), + Token.Puncuation.Comma, + Token.Type("int"), + Token.Puncuation.CloseParen, + Token.Identifiers.FieldName("x"), + Token.Puncuation.Semicolon]); + }); + + it("tuple type with names and no modifiers", () => { + + const input = Input.InClass(`(int x, int y) z;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Puncuation.OpenParen, + Token.Type("int"), + Token.Variables.Tuple("x"), + Token.Puncuation.Comma, + Token.Type("int"), + Token.Variables.Tuple("y"), + Token.Puncuation.CloseParen, + Token.Identifiers.FieldName("z"), + 
Token.Puncuation.Semicolon]); + }); + + it("tuple type with names and private modifier", () => { + + const input = Input.InClass(`private (int x, int y) z;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Private, + Token.Puncuation.OpenParen, + Token.Type("int"), + Token.Variables.Tuple("x"), + Token.Puncuation.Comma, + Token.Type("int"), + Token.Variables.Tuple("y"), + Token.Puncuation.CloseParen, + Token.Identifiers.FieldName("z"), + Token.Puncuation.Semicolon]); + }); }); }); diff --git a/test/syntaxes/utils/tokenize.ts b/test/syntaxes/utils/tokenize.ts index 87089d01ac..218616566f 100644 --- a/test/syntaxes/utils/tokenize.ts +++ b/test/syntaxes/utils/tokenize.ts @@ -261,6 +261,7 @@ export namespace Token { export const EnumMember = (text: string) => createToken(text, 'variable.other.enummember.cs'); export const Parameter = (text: string) => createToken(text, 'variable.parameter.cs'); export const ReadWrite = (text: string) => createToken(text, 'variable.other.readwrite.cs'); + export const Tuple = (text: string) => createToken(text, 'entity.name.variable.tuple.cs'); } export const IllegalNewLine = (text: string) => createToken(text, 'invalid.illegal.newline.cs'); From 30dd20c43f436123145d868f36f4c0f1066d51ff Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Tue, 3 Jan 2017 12:27:35 -0800 Subject: [PATCH 047/192] Add support for alias-qualified names --- syntaxes/csharp-new.json | 46 ++++++-- syntaxes/syntax.md | 24 ++-- test/syntaxes/type-names.test.syntax.ts | 151 ++++++++++++++++++++++++ test/syntaxes/utils/tokenize.ts | 1 + 4 files changed, 198 insertions(+), 24 deletions(-) create mode 100644 test/syntaxes/type-names.test.syntax.ts diff --git a/syntaxes/csharp-new.json b/syntaxes/csharp-new.json index c7d6799c71..24c29e498f 100644 --- a/syntaxes/csharp-new.json +++ b/syntaxes/csharp-new.json @@ -444,14 +444,14 @@ ] }, "delegate-declaration": { - "begin": 
"(?=(?(?:(?:new|public|protected|internal|private)\\s+)*)(?(?:\\b(?:delegate)))\\s+(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*<\\s*(?:(?:(?:in|out)\\s+)?[_$[:alpha:]][_$[:alnum:]]*)(?:,\\s*(?:(?:in|out)\\s+)?[_$[:alpha:]][_$[:alnum:]]*)*\\s*>\\s*)?))\\s*(?:\\())", + "begin": "(?=(?(?:(?:new|public|protected|internal|private)\\s+)*)(?(?:\\b(?:delegate)))\\s+(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*<\\s*(?:(?:(?:in|out)\\s+)?[_$[:alpha:]][_$[:alnum:]]*)(?:,\\s*(?:(?:in|out)\\s+)?[_$[:alpha:]][_$[:alnum:]]*)*\\s*>\\s*)?))\\s*(?:\\())", "end": "(?=;)", "patterns": [ { "include": "#comment" }, { - "match": "(?(?:(?:new|public|protected|internal|private)\\s+)*)(?(?:\\b(?:delegate)))\\s+(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*<\\s*(?:(?:(?:in|out)\\s+)?[_$[:alpha:]][_$[:alnum:]]*)(?:,\\s*(?:(?:in|out)\\s+)?[_$[:alpha:]][_$[:alnum:]]*)*\\s*>\\s*)?))\\s*(?=\\()", + "match": 
"(?(?:(?:new|public|protected|internal|private)\\s+)*)(?(?:\\b(?:delegate)))\\s+(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*<\\s*(?:(?:(?:in|out)\\s+)?[_$[:alpha:]][_$[:alnum:]]*)(?:,\\s*(?:(?:in|out)\\s+)?[_$[:alpha:]][_$[:alnum:]]*)*\\s*>\\s*)?))\\s*(?=\\()", "captures": { "1": { "patterns": [ @@ -764,14 +764,14 @@ ] }, "field-declaration": { - "begin": "(?=(?(?:(?:new|public|protected|internal|private|static|readonly|volatile|const)\\s+)*)\\s*(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)\\s*(?!=>)(?:;|=))", + "begin": "(?=(?(?:(?:new|public|protected|internal|private|static|readonly|volatile|const)\\s+)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)\\s*(?!=>)(?:;|=))", "end": "(?=;)", "patterns": [ { "include": "#comment" }, { - "begin": 
"(?(?:\\b(?:new|public|protected|internal|private|static|readonly|volatile|const)\\s+)*)\\s*(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)\\s*(?!=>)(?=;|=)", + "begin": "(?(?:(?:new|public|protected|internal|private|static|readonly|volatile|const)\\s+)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)\\s*(?!=>)(?=;|=)", "beginCaptures": { "1": { "patterns": [ @@ -816,14 +816,14 @@ ] }, "property-declaration": { - "begin": "(?!.*\\b(?:class|interface|struct|enum|event)\\b)(?=(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\s+)*)\\s*(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)\\s*(?:\\{|=>|$))", + "begin": 
"(?!.*\\b(?:class|interface|struct|enum|event)\\b)(?=(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\s+)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)\\s*(?:\\{|=>|$))", "end": "(?=\\}|;)", "patterns": [ { "include": "#comment" }, { - "match": "(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\s+)*)\\s*(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)\\s*(?=\\{|=>|$)", + "match": "(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\s+)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)\\s*(?=\\{|=>|$)", "captures": { "1": { "patterns": [ @@ -861,14 +861,14 @@ ] }, "indexer-declaration": { - "begin": 
"(?=(?(?:(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\\s+)*)\\s*(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?this)\\s*(?:\\[))", + "begin": "(?=(?(?:(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\\s+)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?this)\\s*(?:\\[))", "end": "(?=\\}|;)", "patterns": [ { "include": "#comment" }, { - "match": "(?(?:(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\\s+)*)\\s*(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?this)\\s*(?=\\[)", + "match": "(?(?:(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\\s+)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?this)\\s*(?=\\[)", "captures": { "1": 
{ "patterns": [ @@ -915,14 +915,14 @@ ] }, "event-declaration": { - "begin": "(?=(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\s+)*)\\s*\\b(?event)\\b\\s*(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*(?:\\s*,\\s*[_$[:alpha:]][_$[:alnum:]]*)*)\\s*(?:\\{|;|$))", + "begin": "(?=(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\s+)*)\\s*\\b(?event)\\b\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*(?:\\s*,\\s*[_$[:alpha:]][_$[:alnum:]]*)*)\\s*(?:\\{|;|$))", "end": "(?=\\}|;)", "patterns": [ { "include": "#comment" }, { - "match": "(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\s+)*)\\s*\\b(?event)\\b\\s*(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*(?:\\s*,\\s*[_$[:alpha:]][_$[:alnum:]]*)*)\\s*(?=\\{|;|$)", + "match": 
"(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\s+)*)\\s*\\b(?event)\\b\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*(?:\\s*,\\s*[_$[:alpha:]][_$[:alnum:]]*)*)\\s*(?=\\{|;|$)", "captures": { "1": { "patterns": [ @@ -1032,14 +1032,14 @@ ] }, "method-declaration": { - "begin": "(?=(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\\s+)*)\\s*(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)(?:\\s*<\\s*\\g(?:\\s*,\\s*\\g)*\\s*>\\s*)?\\s*(?:\\())", + "begin": "(?=(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\\s+)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)(?:\\s*<\\s*\\g(?:\\s*,\\s*\\g)*\\s*>\\s*)?\\s*(?:\\())", "end": "(?=\\}|;)", "patterns": [ { "include": "#comment" }, { - "match": 
"(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\\s+)*)\\s*(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)(?:\\s*<\\s*\\g(?:\\s*,\\s*\\g)*\\s*>\\s*)?\\s*(?=\\()", + "match": "(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\\s+)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)(?:\\s*<\\s*\\g(?:\\s*,\\s*\\g)*\\s*>\\s*)?\\s*(?=\\()", "captures": { "1": { "patterns": [ @@ -1522,6 +1522,17 @@ }, "type-name": { "patterns": [ + { + "match": "([_$[:alpha:]][_$[:alnum:]]*)\\s*(\\:\\:)", + "captures": { + "1": { + "name": "entity.name.type.alias.cs" + }, + "2": { + "name": "punctuation.separator.coloncolon.cs" + } + } + }, { "match": "([_$[:alpha:]][_$[:alnum:]]*)\\s*(\\.)", "captures": { @@ -1533,6 +1544,17 @@ } } }, + { + "match": "(\\.)\\s*([_$[:alpha:]][_$[:alnum:]]*)", + "captures": { + "1": { + "name": "punctuation.accessor.cs" + }, + "2": { + "name": "storage.type.cs" + } + } + }, { "name": "storage.type.cs", "match": "[_$[:alpha:]][_$[:alnum:]]*" diff --git a/syntaxes/syntax.md b/syntaxes/syntax.md index 814e76cc36..6ddfc95feb 100644 --- a/syntaxes/syntax.md +++ b/syntaxes/syntax.md @@ -44,12 +44,12 @@ #### Type name -* Expression: 
`(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)` +* Expression: `(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)` * Matches: `System.Collections.Generic.Dictionary, System.List>>` #### Delegate declarations -* Expression: `(?=(?(?:(?:new|public|protected|internal|private)\s+)*)(?(?:\b(?:delegate)))\s+(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)\s+(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*<\s*(?:(?:(?:in|out)\s+)?[_$[:alpha:]][_$[:alnum:]]*)(?:,\s*(?:(?:in|out)\s+)?[_$[:alpha:]][_$[:alnum:]]*)*\s*>\s*)?))\s*(?:\())` +* Expression: `(?=(?(?:(?:new|public|protected|internal|private)\s+)*)(?(?:\b(?:delegate)))\s+(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)\s+(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*<\s*(?:(?:(?:in|out)\s+)?[_$[:alpha:]][_$[:alnum:]]*)(?:,\s*(?:(?:in|out)\s+)?[_$[:alpha:]][_$[:alnum:]]*)*\s*>\s*)?))\s*(?:\())` * Matches: `delegate (int, int) Foo();` #### Field declaratiosn @@ -57,20 +57,20 @@ Note that fields 
can have multiple declarators with initializers. Our strategy is to match up to the end of the field name. Further field names are matched by looking for identifiers, #punctuation-comma, and #variable-initializer. -* Expression: `(?=(?(?:(?:new|public|protected|internal|private|static|readonly|volatile|const)\s+)*)\s*(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)\s+(?[_$[:alpha:]][_$[:alnum:]]*)\s*(?!=>)(?:;|=))` +* Expression: `(?=(?(?:(?:new|public|protected|internal|private|static|readonly|volatile|const)\s+)*)\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)\s+(?[_$[:alpha:]][_$[:alnum:]]*)\s*(?!=>)(?:;|=))` * Break down: * Storage modifiers: `(?(?:(?:new|public|protected|internal|private|static|readonly|volatile|const)\s+)*)` - * Type name: `\s*(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)` + * Type name: `\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)` * First field name: 
`\s+(?[_$[:alpha:]][_$[:alnum:]]*)*)` * End: `\s*(?!=>)(?:;|=)` #### Event declarations -* Expression: `(?=(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\s+)*)\s*\b(?event)\b\s*(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)\s+(?[_$[:alpha:]][_$[:alnum:]]*(?:\s*,\s*[_$[:alpha:]][_$[:alnum:]]*)*)\s*(?:\{|;|$))` +* Expression: `(?=(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\s+)*)\s*\b(?event)\b\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)\s+(?[_$[:alpha:]][_$[:alnum:]]*(?:\s*,\s*[_$[:alpha:]][_$[:alnum:]]*)*)\s*(?:\{|;|$))` * Break down: * Storage modifiers: `(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\s+)*)` * Event keyword: `\s*\b(?event)\b` - * Type name: `\s*(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)` + * Type name: 
`\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)` * Event name(s): `\s+(?[_$[:alpha:]][_$[:alnum:]]*(?:\s*,\s*[_$[:alpha:]][_$[:alnum:]]*)*)` * End: `\s*(?=\{|;|$)` @@ -79,28 +79,28 @@ Further field names are matched by looking for identifiers, #punctuation-comma, Note that properties can easily match other declarations unintentially. For example, "public class C {" looks a lot like the start of a property if you consider that regular expressions don't know that "class" is a keyword. To handle this situation, we must use look ahead. -* Expression: `(?!.*\b(?:class|interface|struct|enum|event)\b)(?=(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\s+)*)\s*(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)\s+(?[_$[:alpha:]][_$[:alnum:]]*)\s*(?:\{|=>|$))` +* Expression: `(?!.*\b(?:class|interface|struct|enum|event)\b)(?=(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\s+)*)\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)\s+(?[_$[:alpha:]][_$[:alnum:]]*)\s*(?:\{|=>|$))` * Break down: * Don't match other declarations! 
`(?!.*\b(?:class|interface|struct|enum|event)\b)` * Storage modifiers: `(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\s+)*)` - * Type name: `\s*(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)` + * Type name: `\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)` * Property name: `\s+(?[_$[:alpha:]][_$[:alnum:]]*)` * End: `\s*(?:\{|=>|$))` #### Indexer declarations -* Expression: `(?=(?(?:(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\s+)*)\s*(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)\s+(?this)\s*(?:\[))` +* Expression: `(?=(?(?:(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\s+)*)\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)\s+(?this)\s*(?:\[))` * Break down: * Storage modifiers: `(?(?:(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\s+)*)` - * Type name: 
`\s*(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)` + * Type name: `\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)` * Property name: `\s+(?this)` * End: `\s*(?:\[))` #### Method declarations -* Expression: `(?=(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\s+)*)\s*(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)\s+(?[_$[:alpha:]][_$[:alnum:]]*)(?:\s*<\s*\g(?:\s*,\s*\g)*\s*>\s*)?\s*(?:\())` +* Expression: `(?=(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\s+)*)\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)\s+(?[_$[:alpha:]][_$[:alnum:]]*)(?:\s*<\s*\g(?:\s*,\s*\g)*\s*>\s*)?\s*(?:\())` * Break down: * Storage modifiers: `(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\s+)*)` - * Type name: 
`\s*(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\.\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)` + * Type name: `\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)` * Method name and type parameters: `\s+(?[_$[:alpha:]][_$[:alnum:]]*)(?:\s*<\s*\g(?:\s*,\s*\g)*\s*>\s*)?` * End: `\s*(?:\[))` \ No newline at end of file diff --git a/test/syntaxes/type-names.test.syntax.ts b/test/syntaxes/type-names.test.syntax.ts new file mode 100644 index 0000000000..c265969464 --- /dev/null +++ b/test/syntaxes/type-names.test.syntax.ts @@ -0,0 +1,151 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. 
+ *--------------------------------------------------------------------------------------------*/ + +import { should } from 'chai'; +import { tokenize, Input, Token } from './utils/tokenize'; + +describe("Grammar", () => { + before(() => should()); + + describe("Type names", () => { + it("built-in type - object", () => { + + const input = Input.InClass(`object x;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("object"), + Token.Identifiers.FieldName("x"), + Token.Puncuation.Semicolon]); + }); + + it("qualified name - System.Object", () => { + + const input = Input.InClass(`System.Object x;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("System"), + Token.Puncuation.Accessor, + Token.Type("Object"), + Token.Identifiers.FieldName("x"), + Token.Puncuation.Semicolon]); + }); + + it("globally-qualified name - global::System.Object", () => { + + const input = Input.InClass(`global::System.Object x;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.AliasName("global"), + Token.Puncuation.ColonColon, + Token.Type("System"), + Token.Puncuation.Accessor, + Token.Type("Object"), + Token.Identifiers.FieldName("x"), + Token.Puncuation.Semicolon]); + }); + + it("tuple type - (int, int)", () => { + + const input = Input.InClass(`(int, int) x;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Puncuation.OpenParen, + Token.Type("int"), + Token.Puncuation.Comma, + Token.Type("int"), + Token.Puncuation.CloseParen, + Token.Identifiers.FieldName("x"), + Token.Puncuation.Semicolon]); + }); + + it("generic type - List", () => { + + const input = Input.InClass(`List x;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("List"), + Token.Puncuation.TypeParameters.Begin, + Token.Type("int"), + Token.Puncuation.TypeParameters.End, + Token.Identifiers.FieldName("x"), + Token.Puncuation.Semicolon]); + }); + + it("generic type with 
tuple - List<(int, int)>", () => { + + const input = Input.InClass(`List<(int, int)> x;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("List"), + Token.Puncuation.TypeParameters.Begin, + Token.Puncuation.OpenParen, + Token.Type("int"), + Token.Puncuation.Comma, + Token.Type("int"), + Token.Puncuation.CloseParen, + Token.Puncuation.TypeParameters.End, + Token.Identifiers.FieldName("x"), + Token.Puncuation.Semicolon]); + }); + + it("generic type with multiple parameters - Dictionary", () => { + + const input = Input.InClass(`Dictionary x;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("Dictionary"), + Token.Puncuation.TypeParameters.Begin, + Token.Type("int"), + Token.Puncuation.Comma, + Token.Type("int"), + Token.Puncuation.TypeParameters.End, + Token.Identifiers.FieldName("x"), + Token.Puncuation.Semicolon]); + }); + + it("qualified generic type - System.Collections.Generic.List", () => { + + const input = Input.InClass(`System.Collections.Generic.List x;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("System"), + Token.Puncuation.Accessor, + Token.Type("Collections"), + Token.Puncuation.Accessor, + Token.Type("Generic"), + Token.Puncuation.Accessor, + Token.Type("List"), + Token.Puncuation.TypeParameters.Begin, + Token.Type("int"), + Token.Puncuation.TypeParameters.End, + Token.Identifiers.FieldName("x"), + Token.Puncuation.Semicolon]); + }); + + it("generic type with nested type - List.Enumerator", () => { + + const input = Input.InClass(`List.Enumerator x;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("List"), + Token.Puncuation.TypeParameters.Begin, + Token.Type("int"), + Token.Puncuation.TypeParameters.End, + Token.Puncuation.Accessor, + Token.Type("Enumerator"), + Token.Identifiers.FieldName("x"), + Token.Puncuation.Semicolon]); + }); + }); +}); \ No newline at end of file diff --git a/test/syntaxes/utils/tokenize.ts 
b/test/syntaxes/utils/tokenize.ts index 218616566f..ff7f66e96c 100644 --- a/test/syntaxes/utils/tokenize.ts +++ b/test/syntaxes/utils/tokenize.ts @@ -227,6 +227,7 @@ export namespace Token { export const CloseBracket = createToken(']', 'punctuation.squarebracket.close.cs'); export const CloseParen = createToken(')', 'punctuation.parenthesis.close.cs'); export const Colon = createToken(':', 'punctuation.separator.colon.cs'); + export const ColonColon = createToken('::', 'punctuation.separator.coloncolon.cs'); export const Comma = createToken(',', 'punctuation.separator.comma.cs'); export const OpenBrace = createToken('{', 'punctuation.curlybrace.open.cs'); export const OpenBracket = createToken('[', 'punctuation.squarebracket.open.cs'); From c293861fd00ffe31e43e0508d43f0e96558d1690 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Tue, 3 Jan 2017 13:09:06 -0800 Subject: [PATCH 048/192] Add support for constructors --- syntaxes/csharp-new.json | 60 +++++++++++++++-- syntaxes/syntax.md | 14 +++- test/syntaxes/constructors.test.syntax.ts | 81 +++++++++++++++++++++++ 3 files changed, 147 insertions(+), 8 deletions(-) create mode 100644 test/syntaxes/constructors.test.syntax.ts diff --git a/syntaxes/csharp-new.json b/syntaxes/csharp-new.json index 24c29e498f..a060c157ff 100644 --- a/syntaxes/csharp-new.json +++ b/syntaxes/csharp-new.json @@ -87,6 +87,9 @@ { "include": "#method-declaration" }, + { + "include": "#constructor-declaration" + }, { "include": "#punctuation-semicolon" } @@ -115,6 +118,9 @@ { "include": "#method-declaration" }, + { + "include": "#constructor-declaration" + }, { "include": "#punctuation-semicolon" } @@ -1072,13 +1078,53 @@ "include": "#expression-body" }, { - "begin": "(?=\\{)", - "end": "(?=\\})", - "patterns": [ - { - "include": "#block" + "include": "#block" + } + ] + }, + "constructor-declaration": { + "begin": 
"(?=(?:(?(?:(?:public|protected|internal|private|extern|static)\\s+)+)\\s*(?:[_$[:alpha:]][_$[:alnum:]]*)|(?:[_$[:alpha:]][_$[:alnum:]]*))\\s*(?:\\())", + "end": "(?=\\}|;)", + "patterns": [ + { + "include": "#comment" + }, + { + "match": "(?(?:(?:public|protected|internal|private|extern|static)\\s+)+)\\s*(?[_$[:alpha:]][_$[:alnum:]]*)\\s*(?=\\()", + "captures": { + "1": { + "patterns": [ + { + "match": "\\b(public|protected|internal|private|extern|static)\\b", + "captures": { + "1": { + "name": "storage.modifier.cs" + } + } + } + ] + }, + "2": { + "name": "entity.name.function.cs" } - ] + } + }, + { + "match": "([_$[:alpha:]][_$[:alnum:]]*)\\s*(?=\\()", + "captures": { + "1": { + "name": "entity.name.function.cs" + } + } + }, + { + "include": "#parenthesized-parameter-list" + }, + { + "include": "#expression-body" + }, + { + "include": "#block" } ] }, @@ -1493,7 +1539,7 @@ { "match": "(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)(?:\\s+(?[_$[:alpha:]][_$[:alnum:]]*))?", "captures": { - "1":{ + "1": { "patterns": [ { "include": "#type" diff --git a/syntaxes/syntax.md b/syntaxes/syntax.md index 6ddfc95feb..fc96dd1876 100644 --- a/syntaxes/syntax.md +++ b/syntaxes/syntax.md @@ -103,4 +103,16 @@ if you consider that regular expressions don't know that "class" is a keyword. 
T * Storage modifiers: `(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\s+)*)` * Type name: `\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)` * Method name and type parameters: `\s+(?[_$[:alpha:]][_$[:alnum:]]*)(?:\s*<\s*\g(?:\s*,\s*\g)*\s*>\s*)?` - * End: `\s*(?:\[))` \ No newline at end of file + * End: `\s*(?:\())` + +#### Constructor declarations + +Note that the match for constructor declarations contains an `|`. This allows for constructors with and without storage modifiers. +If the storage modifiers are optional (i.e. using a `*` rather than a `+`), this match conflicts with fields where there is a modifier +followed by a tuple type (e.g. `private (int, int) x;`). + +* Expression: `(?=(?:(?(?:(?:public|protected|internal|private|extern|static)\s+)+)\s*(?:[_$[:alpha:]][_$[:alnum:]]*)|(?:[_$[:alpha:]][_$[:alnum:]]*))\s*(?:\())` +* Break down: + * Storage modifiers: `(?(?:(?:public|protected|internal|private|extern|static)\s+)*)` + * Name: `\s+[_$[:alpha:]][_$[:alnum:]]*` + * End: `\s*(?:\())` \ No newline at end of file diff --git a/test/syntaxes/constructors.test.syntax.ts b/test/syntaxes/constructors.test.syntax.ts new file mode 100644 index 0000000000..31e789d453 --- /dev/null +++ b/test/syntaxes/constructors.test.syntax.ts @@ -0,0 +1,81 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. 
+ *--------------------------------------------------------------------------------------------*/ + +import { should } from 'chai'; +import { tokenize, Input, Token } from './utils/tokenize'; + +describe("Grammar", () => { + before(() => should()); + + describe("Constructors", () => { + it("instance constructor with no parameters", () => { + + const input = Input.InClass(`TestClass() { }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("TestClass"), + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace]); + }); + + it("instance constructor with two parameters", () => { + + const input = Input.InClass(` +TestClass(int x, int y) +{ +}`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("TestClass"), + Token.Puncuation.OpenParen, + Token.Type("int"), + Token.Variables.Parameter("x"), + Token.Puncuation.Comma, + Token.Type("int"), + Token.Variables.Parameter("y"), + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace]); + }); + + it("instance constructor with expression body", () => { + + const input = Input.InClass(`TestClass(int x, int y) => Foo();`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("TestClass"), + Token.Puncuation.OpenParen, + Token.Type("int"), + Token.Variables.Parameter("x"), + Token.Puncuation.Comma, + Token.Type("int"), + Token.Variables.Parameter("y"), + Token.Puncuation.CloseParen, + Token.Operators.Arrow, + Token.Variables.ReadWrite("Foo"), + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Puncuation.Semicolon]); + }); + + it("static constructor no parameters", () => { + + const input = Input.InClass(`TestClass() { }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("TestClass"), + Token.Puncuation.OpenParen, + 
Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace]); + }); + }); +}); \ No newline at end of file From 3ebbb0c237711f613e11a26fa72bc1d8fe271fac Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Tue, 3 Jan 2017 13:22:10 -0800 Subject: [PATCH 049/192] Add support for destructors --- syntaxes/csharp-new.json | 35 +++++++++++++++++++ syntaxes/syntax.md | 23 +++++++++++-- test/syntaxes/destructors.test.syntax.ts | 44 ++++++++++++++++++++++++ test/syntaxes/utils/tokenize.ts | 2 ++ 4 files changed, 101 insertions(+), 3 deletions(-) create mode 100644 test/syntaxes/destructors.test.syntax.ts diff --git a/syntaxes/csharp-new.json b/syntaxes/csharp-new.json index a060c157ff..bbafac7ca9 100644 --- a/syntaxes/csharp-new.json +++ b/syntaxes/csharp-new.json @@ -90,6 +90,9 @@ { "include": "#constructor-declaration" }, + { + "include": "#destructor-declaration" + }, { "include": "#punctuation-semicolon" } @@ -121,6 +124,9 @@ { "include": "#constructor-declaration" }, + { + "include": "#destructor-declaration" + }, { "include": "#punctuation-semicolon" } @@ -1128,6 +1134,35 @@ } ] }, + "destructor-declaration": { + "begin": "(?=~(?:[_$[:alpha:]][_$[:alnum:]]*)\\s*(?:\\())", + "end": "(?=\\}|;)", + "patterns": [ + { + "include": "#comment" + }, + { + "match": "(~)([_$[:alpha:]][_$[:alnum:]]*)\\s*(?=\\()", + "captures": { + "1": { + "name": "punctuation.tilde.cs" + }, + "2": { + "name": "entity.name.function.cs" + } + } + }, + { + "include": "#parenthesized-parameter-list" + }, + { + "include": "#expression-body" + }, + { + "include": "#block" + } + ] + }, "block": { "begin": "\\{", "beginCaptures": { diff --git a/syntaxes/syntax.md b/syntaxes/syntax.md index fc96dd1876..a65cab4ba1 100644 --- a/syntaxes/syntax.md +++ b/syntaxes/syntax.md @@ -1,13 +1,21 @@ ## TODO List: -* Declaratiosn: +* Declarations: * Explicitly-implemented interface members - * Constructor declarations - * Destructor declarations * Operator declarations * Conversion 
operator declarations * Interface members +* Statements/Expressions: + * Local variable declarations + * Method calls + * Element access + * LINQ + * switch + * for loops + * foreach loops + * lambda expressions + * Lots of refinement and tests to ensure proper highlighting while typing ## Important regular expressions: @@ -115,4 +123,13 @@ followed by a tuple type (e.g. `private (int, int) x;`). * Break down: * Storage modifiers: `(?(?:(?:public|protected|internal|private|extern|static)\s+)*)` * Name: `\s+[_$[:alpha:]][_$[:alnum:]]*` + * End: `\s*(?:\())` + +#### Destructor declarations + +Note that structs do not allow destructor declarations, but we'll try to highlight them anyway. + +* Expression: `(?=~(?:[_$[:alpha:]][_$[:alnum:]]*)\s*(?:\())` +* Break down: + * Name: `~(?:[_$[:alpha:]][_$[:alnum:]]*)` * End: `\s*(?:\())` \ No newline at end of file diff --git a/test/syntaxes/destructors.test.syntax.ts b/test/syntaxes/destructors.test.syntax.ts new file mode 100644 index 0000000000..b1aca85696 --- /dev/null +++ b/test/syntaxes/destructors.test.syntax.ts @@ -0,0 +1,44 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. 
+ *--------------------------------------------------------------------------------------------*/ + +import { should } from 'chai'; +import { tokenize, Input, Token } from './utils/tokenize'; + +describe("Grammar", () => { + before(() => should()); + + describe("Destructor", () => { + it("declaration", () => { + + const input = Input.InClass(`~TestClass() { }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Puncuation.Tilde, + Token.Identifiers.MethodName("TestClass"), + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace]); + }); + + it("with expression body", () => { + + const input = Input.InClass(`~TestClass() => Foo();`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Puncuation.Tilde, + Token.Identifiers.MethodName("TestClass"), + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Operators.Arrow, + Token.Variables.ReadWrite("Foo"), + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Puncuation.Semicolon]); + }); + }); +}); \ No newline at end of file diff --git a/test/syntaxes/utils/tokenize.ts b/test/syntaxes/utils/tokenize.ts index ff7f66e96c..51e4a13236 100644 --- a/test/syntaxes/utils/tokenize.ts +++ b/test/syntaxes/utils/tokenize.ts @@ -255,6 +255,8 @@ export namespace Token { export const Begin = createToken('<', 'punctuation.definition.typeparameters.begin.cs'); export const End = createToken('>', 'punctuation.definition.typeparameters.end.cs'); } + + export const Tilde = createToken('~', 'punctuation.tilde.cs'); } export namespace Variables { From 2c9ee01423055779d41bcf2e74b9e5c7c4f1803e Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Tue, 3 Jan 2017 14:26:22 -0800 Subject: [PATCH 050/192] Add support for operator declarations --- syntaxes/csharp-new.json | 86 ++- syntaxes/syntax.md | 14 +- .../interpolated-strings.test.syntax.ts | 1 + test/syntaxes/operators.test.syntax.ts | 653 
++++++++++++++++++ test/syntaxes/utils/tokenize.ts | 29 + 5 files changed, 778 insertions(+), 5 deletions(-) create mode 100644 test/syntaxes/operators.test.syntax.ts diff --git a/syntaxes/csharp-new.json b/syntaxes/csharp-new.json index bbafac7ca9..855eacc858 100644 --- a/syntaxes/csharp-new.json +++ b/syntaxes/csharp-new.json @@ -93,6 +93,9 @@ { "include": "#destructor-declaration" }, + { + "include": "#operator-declaration" + }, { "include": "#punctuation-semicolon" } @@ -127,6 +130,9 @@ { "include": "#destructor-declaration" }, + { + "include": "#operator-declaration" + }, { "include": "#punctuation-semicolon" } @@ -776,14 +782,14 @@ ] }, "field-declaration": { - "begin": "(?=(?(?:(?:new|public|protected|internal|private|static|readonly|volatile|const)\\s+)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)\\s*(?!=>)(?:;|=))", + "begin": "(?=(?(?:(?:new|public|protected|internal|private|static|readonly|volatile|const)\\s+)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)\\s*(?!=>|==)(?:;|=))", "end": "(?=;)", "patterns": [ { "include": "#comment" }, { - "begin": 
"(?(?:(?:new|public|protected|internal|private|static|readonly|volatile|const)\\s+)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)\\s*(?!=>)(?=;|=)", + "begin": "(?(?:(?:new|public|protected|internal|private|static|readonly|volatile|const)\\s+)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)\\s*(?!=>|==)(?=;|=)", "beginCaptures": { "1": { "patterns": [ @@ -1163,6 +1169,54 @@ } ] }, + "operator-declaration": { + "begin": "(?=(?(?:(?:public|static|extern)\\s+)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s*(?(?:\\b(?:operator)))\\s*(?(?:\\+|-|\\*|/|%|&|\\||\\^|\\<\\<|\\>\\>|==|!=|\\>|\\<|\\>=|\\<=|!|~|\\+\\+|--|true|false))\\s*(?:\\())", + "end": "(?=\\}|;)", + "patterns": [ + { + "include": "#comment" + }, + { + "match": 
"(?(?:(?:public|static|extern)\\s+)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s*(?(?:\\b(?:operator)))\\s*(?(?:\\+|-|\\*|/|%|&|\\||\\^|\\<\\<|\\>\\>|==|!=|\\>|\\<|\\>=|\\<=|!|~|\\+\\+|--|true|false))\\s*(?=\\()", + "captures": { + "1": { + "patterns": [ + { + "match": "\\b(public|static|extern)\\b", + "captures": { + "1": { + "name": "storage.modifier.cs" + } + } + } + ] + }, + "2": { + "patterns": [ + { + "include": "#type" + } + ] + }, + "3": { + "name": "keyword.other.operator.cs" + }, + "4": { + "name": "entity.name.function.cs" + } + } + }, + { + "include": "#parenthesized-parameter-list" + }, + { + "include": "#expression-body" + }, + { + "include": "#block" + } + ] + }, "block": { "begin": "\\{", "beginCaptures": { @@ -1383,10 +1437,38 @@ }, "expression-operators": { "patterns": [ + { + "name": "keyword.operator.bitwise.shift.cs", + "match": "<<|>>" + }, + { + "name": "keyword.operator.comparison.cs", + "match": "==|!=" + }, + { + "name": "keyword.operator.relational.cs", + "match": "<=|>=|<|>" + }, + { + "name": "keyword.operator.logical.cs", + "match": "\\!|&&|\\|\\|" + }, + { + "name": "keyword.operator.bitwise.cs", + "match": "\\&|~|\\^|\\|" + }, { "name": "keyword.operator.assignment.cs", "match": "\\=" }, + { + "name": "keyword.operator.decrement.cs", + "match": "--" + }, + { + "name": "keyword.operator.increment.cs", + "match": "\\+\\+" + }, { "name": "keyword.operator.arithmetic.cs", "match": "%|\\*|/|-|\\+" diff --git a/syntaxes/syntax.md b/syntaxes/syntax.md index a65cab4ba1..722c029732 100644 --- a/syntaxes/syntax.md +++ b/syntaxes/syntax.md @@ -2,7 +2,6 @@ * Declarations: * Explicitly-implemented interface members 
- * Operator declarations * Conversion operator declarations * Interface members @@ -65,7 +64,7 @@ Note that fields can have multiple declarators with initializers. Our strategy is to match up to the end of the field name. Further field names are matched by looking for identifiers, #punctuation-comma, and #variable-initializer. -* Expression: `(?=(?(?:(?:new|public|protected|internal|private|static|readonly|volatile|const)\s+)*)\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)\s+(?[_$[:alpha:]][_$[:alnum:]]*)\s*(?!=>)(?:;|=))` +* Expression: `(?=(?(?:(?:new|public|protected|internal|private|static|readonly|volatile|const)\s+)*)\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)\s+(?[_$[:alpha:]][_$[:alnum:]]*)\s*(?!=>|==)(?:;|=))` * Break down: * Storage modifiers: `(?(?:(?:new|public|protected|internal|private|static|readonly|volatile|const)\s+)*)` * Type name: `\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)` @@ -132,4 +131,13 @@ Note that structs do not allow destructor declarations, but we'll try to highlig * Expression: `(?=~(?:[_$[:alpha:]][_$[:alnum:]]*)\s*(?:\())` * Break down: * Name: 
`~(?:[_$[:alpha:]][_$[:alnum:]]*)` - * End: `\s*(?:\())` \ No newline at end of file + * End: `\s*(?:\())` + +#### Operator declarations + +* Expression: `(?=(?(?:(?:public|static|extern)\s+)*)\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)\s*(?(?:\b(?:operator)))\s*(?(?:\+|-|\*|\/|%|&|\\||\^|\<\<|\>\>|==|!=|\>|\<|\>=|\<=|!|~|\+\+|--|true|false))\s*(?:\())` +* Break down: + * Storage modifiers: `(?(?:(?:public|static|extern)\s+)*)` + * Type name: `\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)` + * Operator keyword: `\s*(?(?:\b(?:operator)))` + * Operator: `\s*(?(?:\+|-|\*|\/|%|&|\\||\^|\<\<|\>\>|==|!=|\>|\<|\>=|\<=|!|~|\+\+|--|true|false))` diff --git a/test/syntaxes/interpolated-strings.test.syntax.ts b/test/syntaxes/interpolated-strings.test.syntax.ts index 3f1aa17dfb..524f3538e4 100644 --- a/test/syntaxes/interpolated-strings.test.syntax.ts +++ b/test/syntaxes/interpolated-strings.test.syntax.ts @@ -120,6 +120,7 @@ world!";`); // Note: Because the string ended prematurely, the rest of this line and the contents of the next are junk. 
Token.IllegalNewLine("o"), Token.Variables.ReadWrite("world"), + Token.Operators.Logical.Not, Token.Puncuation.String.Begin, Token.IllegalNewLine(";")]); }); diff --git a/test/syntaxes/operators.test.syntax.ts b/test/syntaxes/operators.test.syntax.ts new file mode 100644 index 0000000000..e6ca7b5611 --- /dev/null +++ b/test/syntaxes/operators.test.syntax.ts @@ -0,0 +1,653 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. + *--------------------------------------------------------------------------------------------*/ + +import { should } from 'chai'; +import { tokenize, Input, Token } from './utils/tokenize'; + +describe("Grammar", () => { + before(() => should()); + + describe("Operators", () => { + it("unary +", () => { + + const input = Input.InClass(`public static int operator +(int value) { return +value; }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Public, + Token.Keywords.Modifiers.Static, + Token.Type("int"), + Token.Keywords.Operator, + Token.Identifiers.MethodName("+"), + Token.Puncuation.OpenParen, + Token.Type("int"), + Token.Variables.Parameter("value"), + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Keywords.Return, + Token.Operators.Arithmetic.Addition, + Token.Variables.ReadWrite("value"), + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace]); + }); + + it("unary -", () => { + + const input = Input.InClass(`public static int operator -(int value) { return -value; }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Public, + Token.Keywords.Modifiers.Static, + Token.Type("int"), + Token.Keywords.Operator, + Token.Identifiers.MethodName("-"), + Token.Puncuation.OpenParen, + Token.Type("int"), + 
Token.Variables.Parameter("value"), + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Keywords.Return, + Token.Operators.Arithmetic.Subtraction, + Token.Variables.ReadWrite("value"), + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace]); + }); + + it("unary !", () => { + + const input = Input.InClass(`public static bool operator !(int value) { return value == 0; }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Public, + Token.Keywords.Modifiers.Static, + Token.Type("bool"), + Token.Keywords.Operator, + Token.Identifiers.MethodName("!"), + Token.Puncuation.OpenParen, + Token.Type("int"), + Token.Variables.Parameter("value"), + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Keywords.Return, + Token.Variables.ReadWrite("value"), + Token.Operators.Relational.Equals, + Token.Literals.Numeric.Decimal("0"), + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace]); + }); + + it("unary ~", () => { + + const input = Input.InClass(`public static int operator ~(int value) { return ~value; }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Public, + Token.Keywords.Modifiers.Static, + Token.Type("int"), + Token.Keywords.Operator, + Token.Identifiers.MethodName("~"), + Token.Puncuation.OpenParen, + Token.Type("int"), + Token.Variables.Parameter("value"), + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Keywords.Return, + Token.Operators.Bitwise.BitwiseComplement, + Token.Variables.ReadWrite("value"), + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace]); + }); + + it("unary ++", () => { + + const input = Input.InClass(`public static int operator ++(int value) { return ++value; }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Public, + Token.Keywords.Modifiers.Static, + Token.Type("int"), + Token.Keywords.Operator, + Token.Identifiers.MethodName("++"), + 
Token.Puncuation.OpenParen, + Token.Type("int"), + Token.Variables.Parameter("value"), + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Keywords.Return, + Token.Operators.Increment, + Token.Variables.ReadWrite("value"), + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace]); + }); + + it("unary --", () => { + + const input = Input.InClass(`public static int operator --(int value) { return --value; }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Public, + Token.Keywords.Modifiers.Static, + Token.Type("int"), + Token.Keywords.Operator, + Token.Identifiers.MethodName("--"), + Token.Puncuation.OpenParen, + Token.Type("int"), + Token.Variables.Parameter("value"), + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Keywords.Return, + Token.Operators.Decrement, + Token.Variables.ReadWrite("value"), + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace]); + }); + + it("unary true", () => { + + const input = Input.InClass(`public static int operator true(int value) { return value != 0; }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Public, + Token.Keywords.Modifiers.Static, + Token.Type("int"), + Token.Keywords.Operator, + Token.Identifiers.MethodName("true"), + Token.Puncuation.OpenParen, + Token.Type("int"), + Token.Variables.Parameter("value"), + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Keywords.Return, + Token.Variables.ReadWrite("value"), + Token.Operators.Relational.NotEqual, + Token.Literals.Numeric.Decimal("0"), + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace]); + }); + + it("unary false", () => { + + const input = Input.InClass(`public static int operator false(int value) { return value == 0; }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Public, + Token.Keywords.Modifiers.Static, + Token.Type("int"), + Token.Keywords.Operator, + 
Token.Identifiers.MethodName("false"), + Token.Puncuation.OpenParen, + Token.Type("int"), + Token.Variables.Parameter("value"), + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Keywords.Return, + Token.Variables.ReadWrite("value"), + Token.Operators.Relational.Equals, + Token.Literals.Numeric.Decimal("0"), + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace]); + }); + + it("binary +", () => { + + const input = Input.InClass(`public static int operator +(int x, int y) { return x + y; }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Public, + Token.Keywords.Modifiers.Static, + Token.Type("int"), + Token.Keywords.Operator, + Token.Identifiers.MethodName("+"), + Token.Puncuation.OpenParen, + Token.Type("int"), + Token.Variables.Parameter("x"), + Token.Puncuation.Comma, + Token.Type("int"), + Token.Variables.Parameter("y"), + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Keywords.Return, + Token.Variables.ReadWrite("x"), + Token.Operators.Arithmetic.Addition, + Token.Variables.ReadWrite("y"), + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace]); + }); + + it("binary -", () => { + + const input = Input.InClass(`public static int operator -(int x, int y) { return x - y; }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Public, + Token.Keywords.Modifiers.Static, + Token.Type("int"), + Token.Keywords.Operator, + Token.Identifiers.MethodName("-"), + Token.Puncuation.OpenParen, + Token.Type("int"), + Token.Variables.Parameter("x"), + Token.Puncuation.Comma, + Token.Type("int"), + Token.Variables.Parameter("y"), + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Keywords.Return, + Token.Variables.ReadWrite("x"), + Token.Operators.Arithmetic.Subtraction, + Token.Variables.ReadWrite("y"), + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace]); + }); + + it("binary *", () => { + + const input = 
Input.InClass(`public static int operator *(int x, int y) { return x * y; }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Public, + Token.Keywords.Modifiers.Static, + Token.Type("int"), + Token.Keywords.Operator, + Token.Identifiers.MethodName("*"), + Token.Puncuation.OpenParen, + Token.Type("int"), + Token.Variables.Parameter("x"), + Token.Puncuation.Comma, + Token.Type("int"), + Token.Variables.Parameter("y"), + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Keywords.Return, + Token.Variables.ReadWrite("x"), + Token.Operators.Arithmetic.Multiplication, + Token.Variables.ReadWrite("y"), + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace]); + }); + + it("binary /", () => { + + const input = Input.InClass(`public static int operator /(int x, int y) { return x / y; }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Public, + Token.Keywords.Modifiers.Static, + Token.Type("int"), + Token.Keywords.Operator, + Token.Identifiers.MethodName("/"), + Token.Puncuation.OpenParen, + Token.Type("int"), + Token.Variables.Parameter("x"), + Token.Puncuation.Comma, + Token.Type("int"), + Token.Variables.Parameter("y"), + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Keywords.Return, + Token.Variables.ReadWrite("x"), + Token.Operators.Arithmetic.Division, + Token.Variables.ReadWrite("y"), + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace]); + }); + + it("binary %", () => { + + const input = Input.InClass(`public static int operator %(int x, int y) { return x % y; }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Public, + Token.Keywords.Modifiers.Static, + Token.Type("int"), + Token.Keywords.Operator, + Token.Identifiers.MethodName("%"), + Token.Puncuation.OpenParen, + Token.Type("int"), + Token.Variables.Parameter("x"), + Token.Puncuation.Comma, + Token.Type("int"), + 
Token.Variables.Parameter("y"), + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Keywords.Return, + Token.Variables.ReadWrite("x"), + Token.Operators.Arithmetic.Remainder, + Token.Variables.ReadWrite("y"), + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace]); + }); + + it("binary &", () => { + + const input = Input.InClass(`public static int operator &(int x, int y) { return x & y; }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Public, + Token.Keywords.Modifiers.Static, + Token.Type("int"), + Token.Keywords.Operator, + Token.Identifiers.MethodName("&"), + Token.Puncuation.OpenParen, + Token.Type("int"), + Token.Variables.Parameter("x"), + Token.Puncuation.Comma, + Token.Type("int"), + Token.Variables.Parameter("y"), + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Keywords.Return, + Token.Variables.ReadWrite("x"), + Token.Operators.Bitwise.And, + Token.Variables.ReadWrite("y"), + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace]); + }); + + it("binary |", () => { + + const input = Input.InClass(`public static int operator |(int x, int y) { return x | y; }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Public, + Token.Keywords.Modifiers.Static, + Token.Type("int"), + Token.Keywords.Operator, + Token.Identifiers.MethodName("|"), + Token.Puncuation.OpenParen, + Token.Type("int"), + Token.Variables.Parameter("x"), + Token.Puncuation.Comma, + Token.Type("int"), + Token.Variables.Parameter("y"), + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Keywords.Return, + Token.Variables.ReadWrite("x"), + Token.Operators.Bitwise.Or, + Token.Variables.ReadWrite("y"), + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace]); + }); + + it("binary ^", () => { + + const input = Input.InClass(`public static int operator ^(int x, int y) { return x ^ y; }`); + const tokens = tokenize(input); + + 
tokens.should.deep.equal([ + Token.Keywords.Modifiers.Public, + Token.Keywords.Modifiers.Static, + Token.Type("int"), + Token.Keywords.Operator, + Token.Identifiers.MethodName("^"), + Token.Puncuation.OpenParen, + Token.Type("int"), + Token.Variables.Parameter("x"), + Token.Puncuation.Comma, + Token.Type("int"), + Token.Variables.Parameter("y"), + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Keywords.Return, + Token.Variables.ReadWrite("x"), + Token.Operators.Bitwise.ExclusiveOr, + Token.Variables.ReadWrite("y"), + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace]); + }); + + it("binary <<", () => { + + const input = Input.InClass(`public static int operator <<(int x, int y) { return x << y; }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Public, + Token.Keywords.Modifiers.Static, + Token.Type("int"), + Token.Keywords.Operator, + Token.Identifiers.MethodName("<<"), + Token.Puncuation.OpenParen, + Token.Type("int"), + Token.Variables.Parameter("x"), + Token.Puncuation.Comma, + Token.Type("int"), + Token.Variables.Parameter("y"), + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Keywords.Return, + Token.Variables.ReadWrite("x"), + Token.Operators.Bitwise.ShiftLeft, + Token.Variables.ReadWrite("y"), + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace]); + }); + + it("binary >>", () => { + + const input = Input.InClass(`public static int operator >>(int x, int y) { return x >> y; }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Public, + Token.Keywords.Modifiers.Static, + Token.Type("int"), + Token.Keywords.Operator, + Token.Identifiers.MethodName(">>"), + Token.Puncuation.OpenParen, + Token.Type("int"), + Token.Variables.Parameter("x"), + Token.Puncuation.Comma, + Token.Type("int"), + Token.Variables.Parameter("y"), + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Keywords.Return, + 
Token.Variables.ReadWrite("x"), + Token.Operators.Bitwise.ShiftRight, + Token.Variables.ReadWrite("y"), + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace]); + }); + + it("binary ==", () => { + + const input = Input.InClass(`public static bool operator ==(int x, int y) { return x == y; }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Public, + Token.Keywords.Modifiers.Static, + Token.Type("bool"), + Token.Keywords.Operator, + Token.Identifiers.MethodName("=="), + Token.Puncuation.OpenParen, + Token.Type("int"), + Token.Variables.Parameter("x"), + Token.Puncuation.Comma, + Token.Type("int"), + Token.Variables.Parameter("y"), + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Keywords.Return, + Token.Variables.ReadWrite("x"), + Token.Operators.Relational.Equals, + Token.Variables.ReadWrite("y"), + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace]); + }); + + it("binary !=", () => { + + const input = Input.InClass(`public static bool operator !=(int x, int y) { return x != y; }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Public, + Token.Keywords.Modifiers.Static, + Token.Type("bool"), + Token.Keywords.Operator, + Token.Identifiers.MethodName("!="), + Token.Puncuation.OpenParen, + Token.Type("int"), + Token.Variables.Parameter("x"), + Token.Puncuation.Comma, + Token.Type("int"), + Token.Variables.Parameter("y"), + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Keywords.Return, + Token.Variables.ReadWrite("x"), + Token.Operators.Relational.NotEqual, + Token.Variables.ReadWrite("y"), + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace]); + }); + + it("binary >", () => { + + const input = Input.InClass(`public static bool operator >(int x, int y) { return x > y; }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Public, + Token.Keywords.Modifiers.Static, + 
Token.Type("bool"), + Token.Keywords.Operator, + Token.Identifiers.MethodName(">"), + Token.Puncuation.OpenParen, + Token.Type("int"), + Token.Variables.Parameter("x"), + Token.Puncuation.Comma, + Token.Type("int"), + Token.Variables.Parameter("y"), + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Keywords.Return, + Token.Variables.ReadWrite("x"), + Token.Operators.Relational.GreaterThan, + Token.Variables.ReadWrite("y"), + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace]); + }); + + it("binary <", () => { + + const input = Input.InClass(`public static bool operator <(int x, int y) { return x < y; }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Public, + Token.Keywords.Modifiers.Static, + Token.Type("bool"), + Token.Keywords.Operator, + Token.Identifiers.MethodName("<"), + Token.Puncuation.OpenParen, + Token.Type("int"), + Token.Variables.Parameter("x"), + Token.Puncuation.Comma, + Token.Type("int"), + Token.Variables.Parameter("y"), + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Keywords.Return, + Token.Variables.ReadWrite("x"), + Token.Operators.Relational.LessThan, + Token.Variables.ReadWrite("y"), + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace]); + }); + + it("binary >=", () => { + + const input = Input.InClass(`public static bool operator >=(int x, int y) { return x >= y; }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Public, + Token.Keywords.Modifiers.Static, + Token.Type("bool"), + Token.Keywords.Operator, + Token.Identifiers.MethodName(">="), + Token.Puncuation.OpenParen, + Token.Type("int"), + Token.Variables.Parameter("x"), + Token.Puncuation.Comma, + Token.Type("int"), + Token.Variables.Parameter("y"), + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Keywords.Return, + Token.Variables.ReadWrite("x"), + Token.Operators.Relational.GreaterThanOrEqual, + 
Token.Variables.ReadWrite("y"), + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace]); + }); + + it("binary <=", () => { + + const input = Input.InClass(`public static bool operator <=(int x, int y) { return x <= y; }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Public, + Token.Keywords.Modifiers.Static, + Token.Type("bool"), + Token.Keywords.Operator, + Token.Identifiers.MethodName("<="), + Token.Puncuation.OpenParen, + Token.Type("int"), + Token.Variables.Parameter("x"), + Token.Puncuation.Comma, + Token.Type("int"), + Token.Variables.Parameter("y"), + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Keywords.Return, + Token.Variables.ReadWrite("x"), + Token.Operators.Relational.LessThanOrEqual, + Token.Variables.ReadWrite("y"), + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace]); + }); + + it("with expression body", () => { + + const input = Input.InClass(`public static int operator +(int value) => +value;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Public, + Token.Keywords.Modifiers.Static, + Token.Type("int"), + Token.Keywords.Operator, + Token.Identifiers.MethodName("+"), + Token.Puncuation.OpenParen, + Token.Type("int"), + Token.Variables.Parameter("value"), + Token.Puncuation.CloseParen, + Token.Operators.Arrow, + Token.Operators.Arithmetic.Addition, + Token.Variables.ReadWrite("value"), + Token.Puncuation.Semicolon]); + }); + }); +}); \ No newline at end of file diff --git a/test/syntaxes/utils/tokenize.ts b/test/syntaxes/utils/tokenize.ts index 51e4a13236..3778e4ab73 100644 --- a/test/syntaxes/utils/tokenize.ts +++ b/test/syntaxes/utils/tokenize.ts @@ -179,6 +179,7 @@ export namespace Token { export const Interface = createToken('interface', 'keyword.other.interface.cs'); export const Namespace = createToken('namespace', 'keyword.other.namespace.cs'); export const New = createToken('new', 'keyword.other.new.cs'); + 
export const Operator = createToken('operator', 'keyword.other.operator.cs'); export const Remove = createToken('remove', 'keyword.other.remove.cs'); export const Return = createToken('return', 'keyword.control.flow.cs'); export const Set = createToken('set', 'keyword.other.set.cs'); @@ -219,6 +220,34 @@ export namespace Token { } export const Assignment = createToken('=', 'keyword.operator.assignment.cs'); + + export namespace Bitwise { + export const And = createToken('&', 'keyword.operator.bitwise.cs'); + export const BitwiseComplement = createToken('~', 'keyword.operator.bitwise.cs'); + export const ExclusiveOr = createToken('^', 'keyword.operator.bitwise.cs'); + export const Or = createToken('|', 'keyword.operator.bitwise.cs'); + export const ShiftLeft = createToken('<<', 'keyword.operator.bitwise.shift.cs'); + export const ShiftRight = createToken('>>', 'keyword.operator.bitwise.shift.cs'); + } + + export const Decrement = createToken('--', 'keyword.operator.decrement.cs'); + export const Increment = createToken('++', 'keyword.operator.increment.cs'); + + export namespace Logical { + export const And = createToken('&&', 'keyword.operator.logical.cs'); + export const Not = createToken('!', 'keyword.operator.logical.cs'); + export const Or = createToken('||', 'keyword.operator.logical.cs'); + } + + export namespace Relational { + export const Equals = createToken('==', 'keyword.operator.comparison.cs'); + export const NotEqual = createToken('!=', 'keyword.operator.comparison.cs'); + + export const LessThan = createToken('<', 'keyword.operator.relational.cs'); + export const LessThanOrEqual = createToken('<=', 'keyword.operator.relational.cs'); + export const GreaterThan = createToken('>', 'keyword.operator.relational.cs'); + export const GreaterThanOrEqual = createToken('>=', 'keyword.operator.relational.cs'); + } } export namespace Puncuation { From b782dced4e1f785311d07d071fe1ea4ce0ea9f26 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Tue, 3 Jan 2017 
14:40:32 -0800 Subject: [PATCH 051/192] Add support for conversion operators --- syntaxes/csharp-new.json | 71 ++++++++++++++++++++++++++ syntaxes/syntax.md | 12 ++++- test/syntaxes/operators.test.syntax.ts | 48 +++++++++++++++++ test/syntaxes/utils/tokenize.ts | 2 + 4 files changed, 132 insertions(+), 1 deletion(-) diff --git a/syntaxes/csharp-new.json b/syntaxes/csharp-new.json index 855eacc858..daac973894 100644 --- a/syntaxes/csharp-new.json +++ b/syntaxes/csharp-new.json @@ -96,6 +96,9 @@ { "include": "#operator-declaration" }, + { + "include": "#conversion-operator-declaration" + }, { "include": "#punctuation-semicolon" } @@ -133,6 +136,9 @@ { "include": "#operator-declaration" }, + { + "include": "#conversion-operator-declaration" + }, { "include": "#punctuation-semicolon" } @@ -1217,6 +1223,71 @@ } ] }, + "conversion-operator-declaration": { + "begin": "(?=(?(?:(?:public|static|extern)\\s+)*)\\s*(?(?:\\b(?:explicit|implicit)))\\s*(?(?:\\b(?:operator)))\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s*(?:\\())", + "end": "(?=\\}|;)", + "patterns": [ + { + "include": "#comment" + }, + { + "match": "(?(?:(?:public|static|extern)\\s+)*)\\s*(?(?:\\b(?:explicit|implicit)))\\s*(?(?:\\b(?:operator)))\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s*(?=\\()", + "captures": { + "1": { + "patterns": [ + { + "match": 
"\\b(public|static|extern)\\b", + "captures": { + "1": { + "name": "storage.modifier.cs" + } + } + } + ] + }, + "2": { + "patterns": [ + { + "match": "\\b(explicit)\\b", + "captures": { + "1": { + "name": "keyword.other.explicit.cs" + } + } + }, + { + "match": "\\b(implicit)\\b", + "captures": { + "1": { + "name": "keyword.other.implicit.cs" + } + } + } + ] + }, + "3": { + "name": "keyword.other.operator.cs" + }, + "4": { + "patterns": [ + { + "include": "#type" + } + ] + } + } + }, + { + "include": "#parenthesized-parameter-list" + }, + { + "include": "#expression-body" + }, + { + "include": "#block" + } + ] + }, "block": { "begin": "\\{", "beginCaptures": { diff --git a/syntaxes/syntax.md b/syntaxes/syntax.md index 722c029732..ff8820371e 100644 --- a/syntaxes/syntax.md +++ b/syntaxes/syntax.md @@ -2,7 +2,6 @@ * Declarations: * Explicitly-implemented interface members - * Conversion operator declarations * Interface members * Statements/Expressions: @@ -141,3 +140,14 @@ Note that structs do not allow destructor declarations, but we'll try to highlig * Type name: `\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)` * Operator keyword: `\s*(?(?:\b(?:operator)))` * Operator: `\s*(?(?:\+|-|\*|\/|%|&|\\||\^|\<\<|\>\>|==|!=|\>|\<|\>=|\<=|!|~|\+\+|--|true|false))` + * End: `\s*(?:\())` + +#### Conversion operator declarations + +* Expression: 
`(?=(?(?:(?:public|static|extern)\s+)*)\s*(?(?:\b(?:explicit|implicit)))\s*(?(?:\b(?:operator)))\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)\s*(?:\())` +* Break down: + * Storage modifiers: `(?(?:(?:public|static|extern)\s+)*)` + * Explicit or implicit: `\s*(?(?:\b(?:explicit|implicit)))` + * Operator keyword: `\s*(?(?:\b(?:operator)))` + * Type name: `\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)` + * End: `\s*(?:\())` diff --git a/test/syntaxes/operators.test.syntax.ts b/test/syntaxes/operators.test.syntax.ts index e6ca7b5611..a90e3a4031 100644 --- a/test/syntaxes/operators.test.syntax.ts +++ b/test/syntaxes/operators.test.syntax.ts @@ -629,6 +629,54 @@ describe("Grammar", () => { Token.Puncuation.CloseBrace]); }); + it("implicit conversion", () => { + + const input = Input.InClass(`public static implicit operator bool(int x) { return x != 0; }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Public, + Token.Keywords.Modifiers.Static, + Token.Keywords.Implicit, + Token.Keywords.Operator, + Token.Type("bool"), + Token.Puncuation.OpenParen, + Token.Type("int"), + Token.Variables.Parameter("x"), + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Keywords.Return, + Token.Variables.ReadWrite("x"), + Token.Operators.Relational.NotEqual, + Token.Literals.Numeric.Decimal("0"), + Token.Puncuation.Semicolon, + 
Token.Puncuation.CloseBrace]); + }); + + it("explicit conversion", () => { + + const input = Input.InClass(`public static explicit operator bool(int x) { return x != 0; }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Public, + Token.Keywords.Modifiers.Static, + Token.Keywords.Explicit, + Token.Keywords.Operator, + Token.Type("bool"), + Token.Puncuation.OpenParen, + Token.Type("int"), + Token.Variables.Parameter("x"), + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Keywords.Return, + Token.Variables.ReadWrite("x"), + Token.Operators.Relational.NotEqual, + Token.Literals.Numeric.Decimal("0"), + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace]); + }); + it("with expression body", () => { const input = Input.InClass(`public static int operator +(int value) => +value;`); diff --git a/test/syntaxes/utils/tokenize.ts b/test/syntaxes/utils/tokenize.ts index 3778e4ab73..1ef8efb291 100644 --- a/test/syntaxes/utils/tokenize.ts +++ b/test/syntaxes/utils/tokenize.ts @@ -174,8 +174,10 @@ export namespace Token { export const Do = createToken('do', 'keyword.control.loop.cs'); export const Enum = createToken('enum', 'keyword.other.enum.cs'); export const Event = createToken('event', 'keyword.other.event.cs'); + export const Explicit = createToken('explicit', 'keyword.other.explicit.cs'); export const Extern = createToken('extern', 'keyword.other.extern.cs'); export const Get = createToken('get', 'keyword.other.get.cs'); + export const Implicit = createToken('implicit', 'keyword.other.implicit.cs'); export const Interface = createToken('interface', 'keyword.other.interface.cs'); export const Namespace = createToken('namespace', 'keyword.other.namespace.cs'); export const New = createToken('new', 'keyword.other.new.cs'); From b81b8948d0a82069041600bfce622d21e3abff92 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Tue, 3 Jan 2017 15:28:14 -0800 Subject: [PATCH 052/192] Add support for verbatim string 
literals --- syntaxes/csharp-new.json | 37 +++++- .../interpolated-strings.test.syntax.ts | 27 ++++ test/syntaxes/string-literals.test.syntax.ts | 119 ++++++++++++++++++ test/syntaxes/utils/tokenize.ts | 2 + 4 files changed, 183 insertions(+), 2 deletions(-) create mode 100644 test/syntaxes/string-literals.test.syntax.ts diff --git a/syntaxes/csharp-new.json b/syntaxes/csharp-new.json index daac973894..63105f23e3 100644 --- a/syntaxes/csharp-new.json +++ b/syntaxes/csharp-new.json @@ -1365,6 +1365,9 @@ } }, "patterns": [ + { + "include": "#string-character-escape" + }, { "include": "#interpolation" } @@ -1378,13 +1381,16 @@ "name": "punctuation.definition.string.begin.cs" } }, - "end": "\"", + "end": "\"(?=[^\"])", "endCaptures": { "0": { "name": "punctuation.definition.string.end.cs" } }, "patterns": [ + { + "include": "#verbatim-string-character-escape" + }, { "include": "#interpolation" } @@ -1426,6 +1432,9 @@ }, { "include": "#string-literal" + }, + { + "include": "#verbatim-string-literal" } ] }, @@ -1481,7 +1490,7 @@ }, "string-literal": { "name": "string.quoted.double.cs", - "begin": "\"", + "begin": "(? 
{ + + const input = Input.InClass(`string test = $@"hello {one} ""world"" {two}!";`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("string"), + Token.Identifiers.FieldName("test"), + Token.Operators.Assignment, + Token.Puncuation.InterpolatedString.VerbatimBegin, + Token.Literals.String("hello "), + Token.Puncuation.Interpolation.Begin, + Token.Variables.ReadWrite("one"), + Token.Puncuation.Interpolation.End, + Token.Literals.String(" "), + Token.Literals.CharacterEscape("\"\""), + Token.Literals.String("world"), + Token.Literals.CharacterEscape("\"\""), + Token.Literals.String(" "), + Token.Puncuation.Interpolation.Begin, + Token.Variables.ReadWrite("two"), + Token.Puncuation.Interpolation.End, + Token.Literals.String("!"), + Token.Puncuation.InterpolatedString.End, + Token.Puncuation.Semicolon]); + }); + it("break across two lines with two interpolations (verbatim)", () => { const input = Input.InClass(` diff --git a/test/syntaxes/string-literals.test.syntax.ts b/test/syntaxes/string-literals.test.syntax.ts new file mode 100644 index 0000000000..c1cea3d03c --- /dev/null +++ b/test/syntaxes/string-literals.test.syntax.ts @@ -0,0 +1,119 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. 
+ *--------------------------------------------------------------------------------------------*/ + +import { should } from 'chai'; +import { tokenize, Input, Token } from './utils/tokenize'; + +describe("Grammar", () => { + before(() => should()); + + describe("Literals - string", () => { + it("simple", () => { + + const input = Input.InClass(`string test = "hello world!";`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("string"), + Token.Identifiers.FieldName("test"), + Token.Operators.Assignment, + Token.Puncuation.String.Begin, + Token.Literals.String("hello world!"), + Token.Puncuation.String.End, + Token.Puncuation.Semicolon]); + }); + + it("escaped double-quote", () => { + + const input = Input.InClass(`string test = "hello \\"world!\\"";`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("string"), + Token.Identifiers.FieldName("test"), + Token.Operators.Assignment, + Token.Puncuation.String.Begin, + Token.Literals.String("hello "), + Token.Literals.CharacterEscape("\\\""), + Token.Literals.String("world!"), + Token.Literals.CharacterEscape("\\\""), + Token.Puncuation.String.End, + Token.Puncuation.Semicolon]); + }); + + it("line break before close quote", () => { + + const input = Input.InClass( +`string test = "hello +world!";`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("string"), + Token.Identifiers.FieldName("test"), + Token.Operators.Assignment, + Token.Puncuation.String.Begin, + Token.Literals.String("hello"), + + // Note: Because the string ended prematurely, the rest of this line and the contents of the next are junk. 
+ Token.IllegalNewLine(" "), + Token.Variables.ReadWrite("world"), + Token.Operators.Logical.Not, + Token.Puncuation.String.Begin, + Token.IllegalNewLine(";")]); + }); + + it("simple (verbatim)", () => { + + const input = Input.InClass(`string test = @"hello world!";`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("string"), + Token.Identifiers.FieldName("test"), + Token.Operators.Assignment, + Token.Puncuation.String.VerbatimBegin, + Token.Literals.String("hello world!"), + Token.Puncuation.String.End, + Token.Puncuation.Semicolon]); + }); + + it("escaped double-quote (verbatim)", () => { + + const input = Input.InClass("string test = @\"hello \"\"world!\"\"\";"); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("string"), + Token.Identifiers.FieldName("test"), + Token.Operators.Assignment, + Token.Puncuation.String.VerbatimBegin, + Token.Literals.String("hello "), + Token.Literals.CharacterEscape("\"\""), + Token.Literals.String("world!"), + Token.Literals.CharacterEscape("\"\""), + Token.Puncuation.String.End, + Token.Puncuation.Semicolon]); + }); + + it("line break before close quote (verbatim)", () => { + + const input = Input.InClass( +`string test = @"hello +world!";`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("string"), + Token.Identifiers.FieldName("test"), + Token.Operators.Assignment, + Token.Puncuation.String.VerbatimBegin, + Token.Literals.String("hello "), + Token.Literals.String("world!"), + Token.Puncuation.String.End, + Token.Puncuation.Semicolon]); + }); + }); +}); \ No newline at end of file diff --git a/test/syntaxes/utils/tokenize.ts b/test/syntaxes/utils/tokenize.ts index 1ef8efb291..e7b76cf25d 100644 --- a/test/syntaxes/utils/tokenize.ts +++ b/test/syntaxes/utils/tokenize.ts @@ -207,6 +207,7 @@ export namespace Token { export const Hexadecimal = (text: string) => createToken(text, 'constant.numeric.hex.cs'); } + export const CharacterEscape = 
(text: string) => createToken(text, 'constant.character.escape.cs'); export const String = (text: string) => createToken(text, 'string.quoted.double.cs'); } @@ -280,6 +281,7 @@ export namespace Token { export namespace String { export const Begin = createToken('"', 'punctuation.definition.string.begin.cs'); export const End = createToken('"', 'punctuation.definition.string.end.cs'); + export const VerbatimBegin = createToken('@"', 'punctuation.definition.string.begin.cs'); } export namespace TypeParameters { From 9105b28c8f3546bc32ee83bddc4d9ee439d0ad60 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Wed, 4 Jan 2017 08:26:21 -0800 Subject: [PATCH 053/192] Add support for explicitly-implemented interface methods --- syntaxes/csharp-new.json | 18 +++++++++++-- syntaxes/syntax.md | 39 ++++++++++++++++++---------- test/syntaxes/methods.test.syntax.ts | 18 +++++++++++++ 3 files changed, 60 insertions(+), 15 deletions(-) diff --git a/syntaxes/csharp-new.json b/syntaxes/csharp-new.json index 63105f23e3..c892f86b1c 100644 --- a/syntaxes/csharp-new.json +++ b/syntaxes/csharp-new.json @@ -1056,14 +1056,14 @@ ] }, "method-declaration": { - "begin": "(?=(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\\s+)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)(?:\\s*<\\s*\\g(?:\\s*,\\s*\\g)*\\s*>\\s*)?\\s*(?:\\())", + "begin": 
"(?=(?(?:\\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\\b)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)(\\s*\\.\\s*))?(?[_$[:alpha:]][_$[:alnum:]]*)(?:\\s*<\\s*\\g(?:\\s*,\\s*\\g)*\\s*>\\s*)?\\s*(?:\\())", "end": "(?=\\}|;)", "patterns": [ { "include": "#comment" }, { - "match": "(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\\s+)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)(?:\\s*<\\s*\\g(?:\\s*,\\s*\\g)*\\s*>\\s*)?\\s*(?=\\()", + "match": 
"(?(?:\\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\\b)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)(\\s*\\.\\s*))?(?[_$[:alpha:]][_$[:alnum:]]*)(?:\\s*<\\s*\\g(?:\\s*,\\s*\\g)*\\s*>\\s*)?\\s*(?=\\()", "captures": { "1": { "patterns": [ @@ -1085,6 +1085,20 @@ ] }, "3": { + "patterns": [ + { + "include": "#type" + } + ] + }, + "4": { + "patterns": [ + { + "include": "#punctuation-accessor" + } + ] + }, + "5": { "name": "entity.name.function.cs" } } diff --git a/syntaxes/syntax.md b/syntaxes/syntax.md index ff8820371e..58ea9676e9 100644 --- a/syntaxes/syntax.md +++ b/syntaxes/syntax.md @@ -1,18 +1,29 @@ ## TODO List: * Declarations: - * Explicitly-implemented interface members + * Explicitly-implemented interface members (properties, events, indexers) * Interface members + * Constructor initializers * Statements/Expressions: * Local variable declarations * Method calls * Element access - * LINQ + * query expressions * switch * for loops * foreach loops - * lambda expressions + * lambda expressions and anonymous functions + * array, collection and object initializers + * casts + * compound assignement + * label statements + * jump statements (break, continue, goto, throw) + * try statements + * checked and 
unchecked statements and expressions + * lock statements + * using statements + * yield statement * Lots of refinement and tests to ensure proper highlighting while typing @@ -91,7 +102,7 @@ if you consider that regular expressions don't know that "class" is a keyword. T * Storage modifiers: `(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\s+)*)` * Type name: `\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)` * Property name: `\s+(?[_$[:alpha:]][_$[:alnum:]]*)` - * End: `\s*(?:\{|=>|$))` + * End: `\s*(?:\{|=>|$)` #### Indexer declarations @@ -100,16 +111,18 @@ if you consider that regular expressions don't know that "class" is a keyword. T * Storage modifiers: `(?(?:(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\s+)*)` * Type name: `\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)` * Property name: `\s+(?this)` - * End: `\s*(?:\[))` + * End: `\s*(?:\[)` #### Method declarations -* Expression: 
`(?=(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\s+)*)\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)\s+(?[_$[:alpha:]][_$[:alnum:]]*)(?:\s*<\s*\g(?:\s*,\s*\g)*\s*>\s*)?\s*(?:\())` +* Expression: `(?=(?(?:\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\b)*)\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)(\s*\.\s*))?(?[_$[:alpha:]][_$[:alnum:]]*)(?:\s*<\s*\g(?:\s*,\s*\g)*\s*>\s*)?\s*(?:\())` + * Break down: - * Storage modifiers: `(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\s+)*)` + * Storage modifiers: `(?(?:\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\b)*)` * Type name: 
`\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)` - * Method name and type parameters: `\s+(?[_$[:alpha:]][_$[:alnum:]]*)(?:\s*<\s*\g(?:\s*,\s*\g)*\s*>\s*)?` - * End: `\s*(?:\())` + * Interface name: `\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)(\s*\.\s*))?` + * Method name and type parameters: `(?[_$[:alpha:]][_$[:alnum:]]*)(?:\s*<\s*\g(?:\s*,\s*\g)*\s*>\s*)?` + * End: `\s*(?:\()` #### Constructor declarations @@ -121,7 +134,7 @@ followed by a tuple type (e.g. `private (int, int) x;`). 
* Break down: * Storage modifiers: `(?(?:(?:public|protected|internal|private|extern|static)\s+)*)` * Name: `\s+[_$[:alpha:]][_$[:alnum:]]*` - * End: `\s*(?:\())` + * End: `\s*(?:\()` #### Destructor declarations @@ -130,7 +143,7 @@ Note that structs do not allow destructor declarations, but we'll try to highlig * Expression: `(?=~(?:[_$[:alpha:]][_$[:alnum:]]*)\s*(?:\())` * Break down: * Name: `~(?:[_$[:alpha:]][_$[:alnum:]]*)` - * End: `\s*(?:\())` + * End: `\s*(?:\()` #### Operator declarations @@ -140,7 +153,7 @@ Note that structs do not allow destructor declarations, but we'll try to highlig * Type name: `\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)` * Operator keyword: `\s*(?(?:\b(?:operator)))` * Operator: `\s*(?(?:\+|-|\*|\/|%|&|\\||\^|\<\<|\>\>|==|!=|\>|\<|\>=|\<=|!|~|\+\+|--|true|false))` - * End: `\s*(?:\())` + * End: `\s*(?:\()` #### Conversion operator declarations @@ -150,4 +163,4 @@ Note that structs do not allow destructor declarations, but we'll try to highlig * Explicit or implicit: `\s*(?(?:\b(?:explicit|implicit)))` * Operator keyword: `\s*(?(?:\b(?:operator)))` * Type name: `\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)` - * End: `\s*(?:\())` + * End: `\s*(?:\()` diff --git a/test/syntaxes/methods.test.syntax.ts b/test/syntaxes/methods.test.syntax.ts index a3356a5116..14c4ea53ee 100644 --- a/test/syntaxes/methods.test.syntax.ts +++ 
b/test/syntaxes/methods.test.syntax.ts @@ -73,5 +73,23 @@ int Add(int x, int y) Token.Variables.ReadWrite("y"), Token.Puncuation.Semicolon]); }); + + it("explicitly-implemented interface method", () => { + + const input = Input.InClass(`string IFoo.GetString();`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("string"), + Token.Type("IFoo"), + Token.Puncuation.TypeParameters.Begin, + Token.Type("string"), + Token.Puncuation.TypeParameters.End, + Token.Puncuation.Accessor, + Token.Identifiers.MethodName("GetString"), + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Puncuation.Semicolon]); + }); }); }); \ No newline at end of file From 673044d2dec403b78ff395dd67528362a5be829b Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Wed, 4 Jan 2017 08:51:52 -0800 Subject: [PATCH 054/192] Add support for explicitly-implemented interface indexers --- syntaxes/csharp-new.json | 18 ++++++++++++++++-- syntaxes/syntax.md | 7 ++++--- test/syntaxes/indexers.test.syntax.ts | 20 ++++++++++++++++++++ test/syntaxes/methods.test.syntax.ts | 2 +- 4 files changed, 41 insertions(+), 6 deletions(-) diff --git a/syntaxes/csharp-new.json b/syntaxes/csharp-new.json index c892f86b1c..95a64ed786 100644 --- a/syntaxes/csharp-new.json +++ b/syntaxes/csharp-new.json @@ -885,14 +885,14 @@ ] }, "indexer-declaration": { - "begin": "(?=(?(?:(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\\s+)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?this)\\s*(?:\\[))", + "begin": 
"(?=(?(?:\\b(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\\b)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)(\\s*\\.\\s*))?(?this)\\s*(?:\\[))", "end": "(?=\\}|;)", "patterns": [ { "include": "#comment" }, { - "match": "(?(?:(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\\s+)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?this)\\s*(?=\\[)", + "match": 
"(?(?:\\b(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\\b)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)(\\s*\\.\\s*))?(?this)\\s*(?=\\[)", "captures": { "1": { "patterns": [ @@ -914,6 +914,20 @@ ] }, "3": { + "patterns": [ + { + "include": "#type" + } + ] + }, + "4": { + "patterns": [ + { + "include": "#punctuation-accessor" + } + ] + }, + "5": { "name": "keyword.other.this.cs" } } diff --git a/syntaxes/syntax.md b/syntaxes/syntax.md index 58ea9676e9..6cc8510e16 100644 --- a/syntaxes/syntax.md +++ b/syntaxes/syntax.md @@ -1,7 +1,7 @@ ## TODO List: * Declarations: - * Explicitly-implemented interface members (properties, events, indexers) + * Explicitly-implemented interface members (properties, events) * Interface members * Constructor initializers @@ -106,11 +106,12 @@ if you consider that regular expressions don't know that "class" is a keyword. 
T #### Indexer declarations -* Expression: `(?=(?(?:(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\s+)*)\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)\s+(?this)\s*(?:\[))` +* Expression: `(?=(?(?:\b(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\b)*)\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)(\s*\.\s*))?(?this)\s*(?:\[))` * Break down: * Storage modifiers: `(?(?:(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\s+)*)` * Type name: `\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)` - * Property name: `\s+(?this)` + * Interface name: 
`\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)(\s*\.\s*))?` + * Indexer name: `(?this)` * End: `\s*(?:\[)` #### Method declarations diff --git a/test/syntaxes/indexers.test.syntax.ts b/test/syntaxes/indexers.test.syntax.ts index b3a4b987c2..896043d74a 100644 --- a/test/syntaxes/indexers.test.syntax.ts +++ b/test/syntaxes/indexers.test.syntax.ts @@ -40,5 +40,25 @@ public string this[int index] Token.Puncuation.CloseBrace, Token.Puncuation.CloseBrace]); }); + + it("explicitly-implemented interface member", () => { + + const input = Input.InClass(`string IFoo.this[int index];`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("string"), + Token.Type("IFoo"), + Token.Puncuation.TypeParameters.Begin, + Token.Type("string"), + Token.Puncuation.TypeParameters.End, + Token.Puncuation.Accessor, + Token.Keywords.This, + Token.Puncuation.OpenBracket, + Token.Type("int"), + Token.Variables.Parameter("index"), + Token.Puncuation.CloseBracket, + Token.Puncuation.Semicolon]); + }); }); }); \ No newline at end of file diff --git a/test/syntaxes/methods.test.syntax.ts b/test/syntaxes/methods.test.syntax.ts index 14c4ea53ee..5bac3ae3f3 100644 --- a/test/syntaxes/methods.test.syntax.ts +++ b/test/syntaxes/methods.test.syntax.ts @@ -74,7 +74,7 @@ int Add(int x, int y) Token.Puncuation.Semicolon]); }); - it("explicitly-implemented interface method", () => { + it("explicitly-implemented interface member", () => { const input = Input.InClass(`string IFoo.GetString();`); const tokens = tokenize(input); From 8814cf6b57c7388756873b3a897d58c739fef881 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Wed, 4 Jan 2017 09:01:09 -0800 Subject: [PATCH 055/192] Add support 
for explicitly-implemented interface properties and events --- syntaxes/csharp-new.json | 44 ++++++++++++++++++++----- syntaxes/syntax.md | 25 +++++++------- test/syntaxes/events.test.syntax.ts | 22 +++++++++++++ test/syntaxes/indexers.test.syntax.ts | 2 +- test/syntaxes/properties.test.syntax.ts | 21 ++++++++++++ 5 files changed, 93 insertions(+), 21 deletions(-) diff --git a/syntaxes/csharp-new.json b/syntaxes/csharp-new.json index 95a64ed786..10b3dcf8c9 100644 --- a/syntaxes/csharp-new.json +++ b/syntaxes/csharp-new.json @@ -840,14 +840,14 @@ ] }, "property-declaration": { - "begin": "(?!.*\\b(?:class|interface|struct|enum|event)\\b)(?=(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\s+)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)\\s*(?:\\{|=>|$))", + "begin": 
"(?!.*\\b(?:class|interface|struct|enum|event)\\b)(?=(?(?:\\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\b\\s*)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)(\\s*\\.\\s*))?(?[_$[:alpha:]][_$[:alnum:]]*)\\s*(?:\\{|=>|$))", "end": "(?=\\}|;)", "patterns": [ { "include": "#comment" }, { - "match": "(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\s+)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)\\s*(?=\\{|=>|$)", + "match": 
"(?(?:\\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\b\\s*)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)(\\s*\\.\\s*))?(?[_$[:alpha:]][_$[:alnum:]]*)\\s*(?=\\{|=>|$)", "captures": { "1": { "patterns": [ @@ -869,6 +869,20 @@ ] }, "3": { + "patterns": [ + { + "include": "#type" + } + ] + }, + "4": { + "patterns": [ + { + "include": "#punctuation-accessor" + } + ] + }, + "5": { "name": "entity.name.variable.property.cs" } } @@ -885,14 +899,14 @@ ] }, "indexer-declaration": { - "begin": 
"(?=(?(?:\\b(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\\b)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)(\\s*\\.\\s*))?(?this)\\s*(?:\\[))", + "begin": "(?=(?(?:\\b(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\\b\\s*)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)(\\s*\\.\\s*))?(?this)\\s*(?:\\[))", "end": "(?=\\}|;)", "patterns": [ { "include": "#comment" }, { - "match": 
"(?(?:\\b(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\\b)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)(\\s*\\.\\s*))?(?this)\\s*(?=\\[)", + "match": "(?(?:\\b(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\\b\\s*)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)(\\s*\\.\\s*))?(?this)\\s*(?=\\[)", "captures": { "1": { "patterns": [ @@ -953,14 +967,14 @@ ] }, "event-declaration": { - "begin": 
"(?=(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\s+)*)\\s*\\b(?event)\\b\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*(?:\\s*,\\s*[_$[:alpha:]][_$[:alnum:]]*)*)\\s*(?:\\{|;|$))", + "begin": "(?=(?(?:\\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\b\\s*)*)\\s*\\b(?event)\\b\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)(\\s*\\.\\s*))?(?[_$[:alpha:]][_$[:alnum:]]*(?:\\s*,\\s*[_$[:alpha:]][_$[:alnum:]]*)*)\\s*(?:\\{|;|$))", "end": "(?=\\}|;)", "patterns": [ { "include": "#comment" }, { - "match": 
"(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\s+)*)\\s*\\b(?event)\\b\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*(?:\\s*,\\s*[_$[:alpha:]][_$[:alnum:]]*)*)\\s*(?=\\{|;|$)", + "match": "(?(?:\\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\b\\s*)*)\\s*\\b(?event)\\b\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)(\\s*\\.\\s*))?(?[_$[:alpha:]][_$[:alnum:]]*(?:\\s*,\\s*[_$[:alpha:]][_$[:alnum:]]*)*)\\s*(?=\\{|;|$)", "captures": { "1": { "patterns": [ @@ -985,6 +999,20 @@ ] }, "4": { + "patterns": [ + { + "include": "#type" + } + ] + }, + "5": { + "patterns": [ + { + "include": "#punctuation-accessor" + } + ] + }, + "6": { "patterns": [ { "match": "[_$[:alpha:]][_$[:alnum:]]*", @@ -1070,14 +1098,14 @@ ] }, "method-declaration": { - "begin": 
"(?=(?(?:\\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\\b)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)(\\s*\\.\\s*))?(?[_$[:alpha:]][_$[:alnum:]]*)(?:\\s*<\\s*\\g(?:\\s*,\\s*\\g)*\\s*>\\s*)?\\s*(?:\\())", + "begin": "(?=(?(?:\\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\\b\\s*)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)(\\s*\\.\\s*))?(?[_$[:alpha:]][_$[:alnum:]]*)(?:\\s*<\\s*\\g(?:\\s*,\\s*\\g)*\\s*>\\s*)?\\s*(?:\\())", "end": "(?=\\}|;)", 
"patterns": [ { "include": "#comment" }, { - "match": "(?(?:\\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\\b)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)(\\s*\\.\\s*))?(?[_$[:alpha:]][_$[:alnum:]]*)(?:\\s*<\\s*\\g(?:\\s*,\\s*\\g)*\\s*>\\s*)?\\s*(?=\\()", + "match": 
"(?(?:\\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\\b\\s*)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)(\\s*\\.\\s*))?(?[_$[:alpha:]][_$[:alnum:]]*)(?:\\s*<\\s*\\g(?:\\s*,\\s*\\g)*\\s*>\\s*)?\\s*(?=\\()", "captures": { "1": { "patterns": [ diff --git a/syntaxes/syntax.md b/syntaxes/syntax.md index 6cc8510e16..5cdedc9780 100644 --- a/syntaxes/syntax.md +++ b/syntaxes/syntax.md @@ -1,7 +1,6 @@ ## TODO List: * Declarations: - * Explicitly-implemented interface members (properties, events) * Interface members * Constructor initializers @@ -83,32 +82,34 @@ Further field names are matched by looking for identifiers, #punctuation-comma, #### Event declarations -* Expression: `(?=(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\s+)*)\s*\b(?event)\b\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)\s+(?[_$[:alpha:]][_$[:alnum:]]*(?:\s*,\s*[_$[:alpha:]][_$[:alnum:]]*)*)\s*(?:\{|;|$))` 
+* Expression: `(?=(?(?:\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\b\s*)*)\s*\b(?event)\b\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)(\s*\.\s*))?(?[_$[:alpha:]][_$[:alnum:]]*(?:\s*,\s*[_$[:alpha:]][_$[:alnum:]]*)*)\s*(?:\{|;|$))` * Break down: - * Storage modifiers: `(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\s+)*)` + * Storage modifiers: `(?(?:\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\b\s*)*)` * Event keyword: `\s*\b(?event)\b` * Type name: `\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)` - * Event name(s): `\s+(?[_$[:alpha:]][_$[:alnum:]]*(?:\s*,\s*[_$[:alpha:]][_$[:alnum:]]*)*)` - * End: `\s*(?=\{|;|$)` + * Interface name: 
`\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)(\s*\.\s*))?` + * Event name(s): `(?[_$[:alpha:]][_$[:alnum:]]*(?:\s*,\s*[_$[:alpha:]][_$[:alnum:]]*)*)` + * End: `\s*(?:\{|;|$)` #### Property declarations Note that properties can easily match other declarations unintentially. For example, "public class C {" looks a lot like the start of a property if you consider that regular expressions don't know that "class" is a keyword. To handle this situation, we must use look ahead. -* Expression: `(?!.*\b(?:class|interface|struct|enum|event)\b)(?=(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\s+)*)\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)\s+(?[_$[:alpha:]][_$[:alnum:]]*)\s*(?:\{|=>|$))` +* Expression: 
`(?!.*\b(?:class|interface|struct|enum|event)\b)(?=(?(?:\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\b\s*)*)\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)(\s*\.\s*))?(?[_$[:alpha:]][_$[:alnum:]]*)\s*(?:\{|=>|$))` * Break down: * Don't match other declarations! `(?!.*\b(?:class|interface|struct|enum|event)\b)` - * Storage modifiers: `(?(?:(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\s+)*)` + * Storage modifiers: `(?(?:\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\b\s*)*)` * Type name: `\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)` - * Property name: `\s+(?[_$[:alpha:]][_$[:alnum:]]*)` + * Interface name: 
`\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)(\s*\.\s*))?` + * Property name: `(?[_$[:alpha:]][_$[:alnum:]]*)` * End: `\s*(?:\{|=>|$)` #### Indexer declarations -* Expression: `(?=(?(?:\b(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\b)*)\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)(\s*\.\s*))?(?this)\s*(?:\[))` +* Expression: 
`(?=(?(?:\b(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\b\s*)*)\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)(\s*\.\s*))?(?this)\s*(?:\[))` * Break down: - * Storage modifiers: `(?(?:(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\s+)*)` + * Storage modifiers: `(?(?:\b(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\b\s*)*)` * Type name: `\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)` * Interface name: `\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)(\s*\.\s*))?` * Indexer name: `(?this)` @@ -116,10 +117,10 @@ if you consider that regular expressions don't know that "class" is a keyword. 
T #### Method declarations -* Expression: `(?=(?(?:\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\b)*)\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)(\s*\.\s*))?(?[_$[:alpha:]][_$[:alnum:]]*)(?:\s*<\s*\g(?:\s*,\s*\g)*\s*>\s*)?\s*(?:\())` +* Expression: `(?=(?(?:\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\b\s*)*)\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)(\s*\.\s*))?(?[_$[:alpha:]][_$[:alnum:]]*)(?:\s*<\s*\g(?:\s*,\s*\g)*\s*>\s*)?\s*(?:\())` * Break down: - * Storage modifiers: `(?(?:\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\b)*)` + * 
Storage modifiers: `(?(?:\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\b\s*)*)` * Type name: `\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)` * Interface name: `\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)(\s*\.\s*))?` * Method name and type parameters: `(?[_$[:alpha:]][_$[:alnum:]]*)(?:\s*<\s*\g(?:\s*,\s*\g)*\s*>\s*)?` diff --git a/test/syntaxes/events.test.syntax.ts b/test/syntaxes/events.test.syntax.ts index a9ffe0965d..e8962c1b19 100644 --- a/test/syntaxes/events.test.syntax.ts +++ b/test/syntaxes/events.test.syntax.ts @@ -103,5 +103,27 @@ public event Type Event Token.Puncuation.CloseBrace, Token.Puncuation.CloseBrace]); }); + + it("explicitly-implemented interface member", () => { + + const input = Input.InClass(`event EventHandler IFoo.Event { add; remove; }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Event, + Token.Type("EventHandler"), + Token.Type("IFoo"), + Token.Puncuation.TypeParameters.Begin, + Token.Type("string"), + Token.Puncuation.TypeParameters.End, + Token.Puncuation.Accessor, + Token.Identifiers.EventName("Event"), + Token.Puncuation.OpenBrace, + Token.Keywords.Add, + Token.Puncuation.Semicolon, + Token.Keywords.Remove, + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace]); + }); }); }); \ No newline at end of file diff --git 
a/test/syntaxes/indexers.test.syntax.ts b/test/syntaxes/indexers.test.syntax.ts index 896043d74a..9b135564a0 100644 --- a/test/syntaxes/indexers.test.syntax.ts +++ b/test/syntaxes/indexers.test.syntax.ts @@ -40,7 +40,7 @@ public string this[int index] Token.Puncuation.CloseBrace, Token.Puncuation.CloseBrace]); }); - + it("explicitly-implemented interface member", () => { const input = Input.InClass(`string IFoo.this[int index];`); diff --git a/test/syntaxes/properties.test.syntax.ts b/test/syntaxes/properties.test.syntax.ts index b4fb791622..795eafcf44 100644 --- a/test/syntaxes/properties.test.syntax.ts +++ b/test/syntaxes/properties.test.syntax.ts @@ -233,5 +233,26 @@ private bool prop2 => true;`); Token.Literals.Boolean.True, Token.Puncuation.Semicolon]); }); + + it("explicitly-implemented interface member", () => { + + const input = Input.InClass(`string IFoo.Bar { get; set; }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("string"), + Token.Type("IFoo"), + Token.Puncuation.TypeParameters.Begin, + Token.Type("string"), + Token.Puncuation.TypeParameters.End, + Token.Puncuation.Accessor, + Token.Identifiers.PropertyName("Bar"), + Token.Puncuation.OpenBrace, + Token.Keywords.Get, + Token.Puncuation.Semicolon, + Token.Keywords.Set, + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace]); + }); }); }); \ No newline at end of file From 1ad05ee5879e29aef87810215a400bfe9aca11ea Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Wed, 4 Jan 2017 11:03:39 -0800 Subject: [PATCH 056/192] Add support for interface members --- syntaxes/csharp-new.json | 31 ++++++++++- syntaxes/syntax.md | 5 +- test/syntaxes/events.test.syntax.ts | 12 ++++ test/syntaxes/indexers.test.syntax.ts | 56 +++++++++++++++++++ test/syntaxes/methods.test.syntax.ts | 73 +++++++++++++++++++++++++ test/syntaxes/properties.test.syntax.ts | 44 +++++++++++++++ test/syntaxes/utils/tokenize.ts | 13 +++++ 7 files changed, 228 insertions(+), 6 deletions(-) diff --git 
a/syntaxes/csharp-new.json b/syntaxes/csharp-new.json index 10b3dcf8c9..c4854af17f 100644 --- a/syntaxes/csharp-new.json +++ b/syntaxes/csharp-new.json @@ -61,6 +61,9 @@ }, { "include": "#struct-declaration" + }, + { + "include": "#punctuation-semicolon" } ] }, @@ -144,6 +147,25 @@ } ] }, + "interface-members": { + "patterns": [ + { + "include": "#event-declaration" + }, + { + "include": "#property-declaration" + }, + { + "include": "#indexer-declaration" + }, + { + "include": "#method-declaration" + }, + { + "include": "#punctuation-semicolon" + } + ] + }, "statement": { "patterns": [ { @@ -663,7 +685,7 @@ }, "patterns": [ { - "include": "#punctuation-semicolon" + "include": "#interface-members" } ] } @@ -1098,14 +1120,14 @@ ] }, "method-declaration": { - "begin": "(?=(?(?:\\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\\b\\s*)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)(\\s*\\.\\s*))?(?[_$[:alpha:]][_$[:alnum:]]*)(?:\\s*<\\s*\\g(?:\\s*,\\s*\\g)*\\s*>\\s*)?\\s*(?:\\())", + "begin": 
"(?=(?(?:\\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\\b\\s*)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)(\\s*\\.\\s*))?(?(?[_$[:alpha:]][_$[:alnum:]]*)(?:\\s*<\\s*\\g(?:\\s*,\\s*\\g)*\\s*>\\s*)?)\\s*(?:\\())", "end": "(?=\\}|;)", "patterns": [ { "include": "#comment" }, { - "match": 
"(?(?:\\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\\b\\s*)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)(\\s*\\.\\s*))?(?[_$[:alpha:]][_$[:alnum:]]*)(?:\\s*<\\s*\\g(?:\\s*,\\s*\\g)*\\s*>\\s*)?\\s*(?=\\()", + "match": "(?(?:\\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\\b\\s*)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)(\\s*\\.\\s*))?(?(?[_$[:alpha:]][_$[:alnum:]]*)(?:\\s*<\\s*\\g(?:\\s*,\\s*\\g)*\\s*>\\s*)?)\\s*(?=\\()", "captures": { "1": { 
"patterns": [ @@ -1148,6 +1170,9 @@ { "include": "#parenthesized-parameter-list" }, + { + "include": "#generic-constraints" + }, { "include": "#expression-body" }, diff --git a/syntaxes/syntax.md b/syntaxes/syntax.md index 5cdedc9780..94ebc81281 100644 --- a/syntaxes/syntax.md +++ b/syntaxes/syntax.md @@ -1,7 +1,6 @@ ## TODO List: * Declarations: - * Interface members * Constructor initializers * Statements/Expressions: @@ -117,13 +116,13 @@ if you consider that regular expressions don't know that "class" is a keyword. T #### Method declarations -* Expression: `(?=(?(?:\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\b\s*)*)\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)(\s*\.\s*))?(?[_$[:alpha:]][_$[:alnum:]]*)(?:\s*<\s*\g(?:\s*,\s*\g)*\s*>\s*)?\s*(?:\())` +* Expression: 
`(?=(?(?:\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\b\s*)*)\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)(\s*\.\s*))?(?(?[_$[:alpha:]][_$[:alnum:]]*)(?:\s*<\s*\g(?:\s*,\s*\g)*\s*>\s*)?)\s*(?:\())` * Break down: * Storage modifiers: `(?(?:\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\b\s*)*)` * Type name: `\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)` * Interface name: `\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)(\s*\.\s*))?` - * Method name and type parameters: `(?[_$[:alpha:]][_$[:alnum:]]*)(?:\s*<\s*\g(?:\s*,\s*\g)*\s*>\s*)?` + * Method name and type parameters: `(?(?[_$[:alpha:]][_$[:alnum:]]*)(?:\s*<\s*\g(?:\s*,\s*\g)*\s*>\s*)?)` * 
End: `\s*(?:\()` #### Constructor declarations diff --git a/test/syntaxes/events.test.syntax.ts b/test/syntaxes/events.test.syntax.ts index e8962c1b19..91938891e7 100644 --- a/test/syntaxes/events.test.syntax.ts +++ b/test/syntaxes/events.test.syntax.ts @@ -125,5 +125,17 @@ public event Type Event Token.Puncuation.Semicolon, Token.Puncuation.CloseBrace]); }); + + it("declaration in interface", () => { + + const input = Input.InInterface(`event EventHandler Event;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Event, + Token.Type("EventHandler"), + Token.Identifiers.EventName("Event"), + Token.Puncuation.Semicolon]); + }); }); }); \ No newline at end of file diff --git a/test/syntaxes/indexers.test.syntax.ts b/test/syntaxes/indexers.test.syntax.ts index 9b135564a0..f073400e5d 100644 --- a/test/syntaxes/indexers.test.syntax.ts +++ b/test/syntaxes/indexers.test.syntax.ts @@ -60,5 +60,61 @@ public string this[int index] Token.Puncuation.CloseBracket, Token.Puncuation.Semicolon]); }); + + it("declaration in interface", () => { + + const input = Input.InInterface(`string this[int index] { get; set; }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("string"), + Token.Keywords.This, + Token.Puncuation.OpenBracket, + Token.Type("int"), + Token.Variables.Parameter("index"), + Token.Puncuation.CloseBracket, + Token.Puncuation.OpenBrace, + Token.Keywords.Get, + Token.Puncuation.Semicolon, + Token.Keywords.Set, + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace]); + }); + + it("declaration in interface (read-only)", () => { + + const input = Input.InInterface(`string this[int index] { get; }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("string"), + Token.Keywords.This, + Token.Puncuation.OpenBracket, + Token.Type("int"), + Token.Variables.Parameter("index"), + Token.Puncuation.CloseBracket, + Token.Puncuation.OpenBrace, + Token.Keywords.Get, + 
Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace]); + }); + + it("declaration in interface (write-only)", () => { + + const input = Input.InInterface(`string this[int index] { set; }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("string"), + Token.Keywords.This, + Token.Puncuation.OpenBracket, + Token.Type("int"), + Token.Variables.Parameter("index"), + Token.Puncuation.CloseBracket, + Token.Puncuation.OpenBrace, + Token.Keywords.Set, + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace]); + }); }); }); \ No newline at end of file diff --git a/test/syntaxes/methods.test.syntax.ts b/test/syntaxes/methods.test.syntax.ts index 5bac3ae3f3..6d6062dcdf 100644 --- a/test/syntaxes/methods.test.syntax.ts +++ b/test/syntaxes/methods.test.syntax.ts @@ -52,6 +52,26 @@ int Add(int x, int y) Token.Puncuation.CloseBrace]); }); + it("declaration in with generic constraints", () => { + + const input = Input.InClass(`TResult GetString(T arg) where T : TResult { }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("TResult"), + Token.Identifiers.MethodName("GetString"), + Token.Puncuation.OpenParen, + Token.Type("T"), + Token.Variables.Parameter("arg"), + Token.Puncuation.CloseParen, + Token.Keywords.Where, + Token.Type("T"), + Token.Puncuation.Colon, + Token.Type("TResult"), + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace]); + }); + it("expression body", () => { const input = Input.InClass(`int Add(int x, int y) => x + y;`); @@ -91,5 +111,58 @@ int Add(int x, int y) Token.Puncuation.CloseParen, Token.Puncuation.Semicolon]); }); + + it("declaration in interface", () => { + + const input = Input.InInterface(`string GetString();`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("string"), + Token.Identifiers.MethodName("GetString"), + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Puncuation.Semicolon]); + }); + + it("declaration in interface 
with parameters", () => { + + const input = Input.InInterface(`string GetString(string format, params object[] args);`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("string"), + Token.Identifiers.MethodName("GetString"), + Token.Puncuation.OpenParen, + Token.Type("string"), + Token.Variables.Parameter("format"), + Token.Puncuation.Comma, + Token.Keywords.Modifiers.Params, + Token.Type("object"), + Token.Puncuation.OpenBracket, + Token.Puncuation.CloseBracket, + Token.Variables.Parameter("args"), + Token.Puncuation.CloseParen, + Token.Puncuation.Semicolon]); + }); + + it("declaration in interface with generic constraints", () => { + + const input = Input.InInterface(`TResult GetString(T arg) where T : TResult;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("TResult"), + Token.Identifiers.MethodName("GetString"), + Token.Puncuation.OpenParen, + Token.Type("T"), + Token.Variables.Parameter("arg"), + Token.Puncuation.CloseParen, + Token.Keywords.Where, + Token.Type("T"), + Token.Puncuation.Colon, + Token.Type("TResult"), + Token.Puncuation.Semicolon]); + }); }); }); \ No newline at end of file diff --git a/test/syntaxes/properties.test.syntax.ts b/test/syntaxes/properties.test.syntax.ts index 795eafcf44..7790ac2b83 100644 --- a/test/syntaxes/properties.test.syntax.ts +++ b/test/syntaxes/properties.test.syntax.ts @@ -254,5 +254,49 @@ private bool prop2 => true;`); Token.Puncuation.Semicolon, Token.Puncuation.CloseBrace]); }); + + it("declaration in interface", () => { + + const input = Input.InInterface(`string Bar { get; set; }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("string"), + Token.Identifiers.PropertyName("Bar"), + Token.Puncuation.OpenBrace, + Token.Keywords.Get, + Token.Puncuation.Semicolon, + Token.Keywords.Set, + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace]); + }); + + it("declaration in interface (read-only)", () => { + + const input = 
Input.InInterface(`string Bar { get; }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("string"), + Token.Identifiers.PropertyName("Bar"), + Token.Puncuation.OpenBrace, + Token.Keywords.Get, + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace]); + }); + + it("declaration in interface (write-only)", () => { + + const input = Input.InInterface(`string Bar { set; }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("string"), + Token.Identifiers.PropertyName("Bar"), + Token.Puncuation.OpenBrace, + Token.Keywords.Set, + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace]); + }); }); }); \ No newline at end of file diff --git a/test/syntaxes/utils/tokenize.ts b/test/syntaxes/utils/tokenize.ts index e7b76cf25d..9c11786827 100644 --- a/test/syntaxes/utils/tokenize.ts +++ b/test/syntaxes/utils/tokenize.ts @@ -78,6 +78,19 @@ class TestClass { return new Input(lines, { startLine: 2, startIndex: 4, endLine: lines.length - 1, endIndex: 0 }); } + public static InInterface(input: string) { + let text = ` +interface TestInterface { + ${input} +}`; + + // ensure consistent line-endings irrelevant of OS + text = text.replace('\r\n', '\n'); + let lines = text.split('\n'); + + return new Input(lines, { startLine: 2, startIndex: 4, endLine: lines.length - 1, endIndex: 0 }); + } + public static InMethod(input: string) { let text = ` class TestClass { From 1b1c4508bad2456974f1e1bc65730ef9d9a6d6e8 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Thu, 5 Jan 2017 14:40:52 -0800 Subject: [PATCH 057/192] Move to YAML to make it easier to define grammar --- build.ts | 23 + package.json | 4 + syntaxes/build/index.d.ts | 3 + syntaxes/build/package.json | 18 + syntaxes/build/tsconfig.json | 10 + syntaxes/csharp-new.json | 2014 -------------------- syntaxes/csharp.tmLanguage | 3113 +++++++++++++++++++++++++++++++ syntaxes/csharp.tmLanguage.yml | 1150 ++++++++++++ test/syntaxes/utils/tokenize.ts | 2 +- tsconfig.json 
| 1 + typings/plist/plist.d.ts | 3 + 11 files changed, 4326 insertions(+), 2015 deletions(-) create mode 100644 build.ts create mode 100644 syntaxes/build/index.d.ts create mode 100644 syntaxes/build/package.json create mode 100644 syntaxes/build/tsconfig.json delete mode 100644 syntaxes/csharp-new.json create mode 100644 syntaxes/csharp.tmLanguage create mode 100644 syntaxes/csharp.tmLanguage.yml create mode 100644 typings/plist/plist.d.ts diff --git a/build.ts b/build.ts new file mode 100644 index 0000000000..609f7247ef --- /dev/null +++ b/build.ts @@ -0,0 +1,23 @@ +import * as fs from 'fs'; +import * as path from 'path'; +import * as yaml from 'js-yaml'; +import * as plist from 'plist'; + +function writePlistFile(grammar: any, fileName: string) { + const text = plist.build(grammar); + fs.writeFileSync(fileName, text, "utf8"); +} + +function readYaml(fileName: string) { + const text = fs.readFileSync(fileName, "utf8"); + return yaml.safeLoad(text); +} + +function buildGrammar() { + const tsGrammar = readYaml("syntaxes/csharp.tmLanguage.yml"); + + // Write csharp.tmLanguage + writePlistFile(tsGrammar, "syntaxes/csharp.tmLanguage"); +} + +buildGrammar(); \ No newline at end of file diff --git a/package.json b/package.json index 7e5fc7a759..112a6e80d4 100644 --- a/package.json +++ b/package.json @@ -27,6 +27,7 @@ "compile": "tsc -p ./ && gulp tslint", "watch": "tsc -watch -p ./", "test": "node ./node_modules/vscode/bin/test", + "build-syntax": "node ./out/build.js", "test-syntax": "mocha --timeout 15000 --ui bdd ./out/test/syntaxes/*.test.syntax.js", "postinstall": "node ./node_modules/vscode/bin/install" }, @@ -46,6 +47,7 @@ "devDependencies": { "@types/chai": "^3.4.34", "@types/fs-extra": "0.0.35", + "@types/js-yaml": "^3.5.29", "@types/mkdirp": "^0.3.29", "@types/mocha": "^2.2.32", "@types/node": "^6.0.40", @@ -56,7 +58,9 @@ "gulp": "^3.9.1", "gulp-mocha": "^2.1.3", "gulp-tslint": "^4.3.0", + "js-yaml": "^3.7.0", "mocha": "^2.3.3", + "plist": "^2.0.1", "tslint": 
"^3.15.1", "tslint-microsoft-contrib": "^2.0.12", "typescript": "^2.0.3", diff --git a/syntaxes/build/index.d.ts b/syntaxes/build/index.d.ts new file mode 100644 index 0000000000..8f29ae685e --- /dev/null +++ b/syntaxes/build/index.d.ts @@ -0,0 +1,3 @@ +declare module "plist" { + export function build(json: any): string; +} \ No newline at end of file diff --git a/syntaxes/build/package.json b/syntaxes/build/package.json new file mode 100644 index 0000000000..eae417dc3b --- /dev/null +++ b/syntaxes/build/package.json @@ -0,0 +1,18 @@ +{ + "name": "build", + "version": "1.0.0", + "private": true, + "description": "", + "main": "build.js", + "scripts": { + "postinstall": "npm start", + "start": "tsc && node ./build.js" + }, + "dependencies": { + "@types/js-yaml": "latest", + "@types/node": "latest", + "js-yaml": "latest", + "plist": "latest", + "typescript": "latest" + } +} \ No newline at end of file diff --git a/syntaxes/build/tsconfig.json b/syntaxes/build/tsconfig.json new file mode 100644 index 0000000000..14c76f8513 --- /dev/null +++ b/syntaxes/build/tsconfig.json @@ -0,0 +1,10 @@ +{ + "compilerOptions": { + "target": "ES5", + "module": "commonjs", + "noImplicitAny": true + }, + "exclude": [ + "node_modules" + ] +} \ No newline at end of file diff --git a/syntaxes/csharp-new.json b/syntaxes/csharp-new.json deleted file mode 100644 index c4854af17f..0000000000 --- a/syntaxes/csharp-new.json +++ /dev/null @@ -1,2014 +0,0 @@ -{ - "name": "C#", - "scopeName": "source.cs", - "fileTypes": [ - "cs" - ], - "patterns": [ - { - "include": "#comment" - }, - { - "include": "#directives" - }, - { - "include": "#declarations" - } - ], - "repository": { - "directives": { - "patterns": [ - { - "include": "#extern-alias-directive" - }, - { - "include": "#using-directive" - }, - { - "include": "#attribute-section" - }, - { - "include": "#punctuation-semicolon" - } - ] - }, - "declarations": { - "patterns": [ - { - "include": "#namespace-declaration" - }, - { - "include": 
"#type-declarations" - }, - { - "include": "#punctuation-semicolon" - } - ] - }, - "type-declarations": { - "patterns": [ - { - "include": "#class-declaration" - }, - { - "include": "#delegate-declaration" - }, - { - "include": "#enum-declaration" - }, - { - "include": "#interface-declaration" - }, - { - "include": "#struct-declaration" - }, - { - "include": "#punctuation-semicolon" - } - ] - }, - "class-members": { - "patterns": [ - { - "include": "#type-declarations" - }, - { - "include": "#event-declaration" - }, - { - "include": "#property-declaration" - }, - { - "include": "#variable-initializer" - }, - { - "include": "#indexer-declaration" - }, - { - "include": "#field-declaration" - }, - { - "include": "#method-declaration" - }, - { - "include": "#constructor-declaration" - }, - { - "include": "#destructor-declaration" - }, - { - "include": "#operator-declaration" - }, - { - "include": "#conversion-operator-declaration" - }, - { - "include": "#punctuation-semicolon" - } - ] - }, - "struct-members": { - "patterns": [ - { - "include": "#type-declarations" - }, - { - "include": "#event-declaration" - }, - { - "include": "#property-declaration" - }, - { - "include": "#indexer-declaration" - }, - { - "include": "#field-declaration" - }, - { - "include": "#variable-initializer" - }, - { - "include": "#method-declaration" - }, - { - "include": "#constructor-declaration" - }, - { - "include": "#destructor-declaration" - }, - { - "include": "#operator-declaration" - }, - { - "include": "#conversion-operator-declaration" - }, - { - "include": "#punctuation-semicolon" - } - ] - }, - "interface-members": { - "patterns": [ - { - "include": "#event-declaration" - }, - { - "include": "#property-declaration" - }, - { - "include": "#indexer-declaration" - }, - { - "include": "#method-declaration" - }, - { - "include": "#punctuation-semicolon" - } - ] - }, - "statement": { - "patterns": [ - { - "include": "#comment" - }, - { - "include": "#control-statement" - }, - { - 
"include": "#expression" - }, - { - "include": "#block" - }, - { - "include": "#punctuation-semicolon" - } - ] - }, - "expression": { - "patterns": [ - { - "include": "#interpolated-string" - }, - { - "include": "#verbatim-interpolated-string" - }, - { - "include": "#literal" - }, - { - "include": "#expression-operators" - }, - { - "include": "#object-creation-expression" - }, - { - "include": "#parenthesized-expression" - }, - { - "include": "#identifier" - } - ] - }, - "extern-alias-directive": { - "begin": "\\s*(extern)\\b\\s*(alias)\\b\\s*([_$[:alpha:]][_$[:alnum:]]*)", - "beginCaptures": { - "1": { - "name": "keyword.other.extern.cs" - }, - "2": { - "name": "keyword.other.alias.cs" - }, - "3": { - "name": "variable.other.alias.cs" - } - }, - "end": "(?=;)" - }, - "using-directive": { - "patterns": [ - { - "begin": "\\b(using)\\b\\s+(static)\\s+", - "beginCaptures": { - "1": { - "name": "keyword.other.using.cs" - }, - "2": { - "name": "keyword.other.static.cs" - } - }, - "end": "(?=;)", - "patterns": [ - { - "include": "#type" - } - ] - }, - { - "begin": "\\b(using)\\s+(?=([_$[:alpha:]][_$[:alnum:]]*)\\s*=)", - "beginCaptures": { - "1": { - "name": "keyword.other.using.cs" - }, - "2": { - "name": "entity.name.type.alias.cs" - } - }, - "end": "(?=;)", - "patterns": [ - { - "include": "#comment" - }, - { - "include": "#type" - }, - { - "include": "#operator-assignment" - } - ] - }, - { - "begin": "\\b(using)\\s*", - "beginCaptures": { - "1": { - "name": "keyword.other.using.cs" - } - }, - "end": "(?=;)", - "patterns": [ - { - "include": "#comment" - }, - { - "name": "entity.name.type.namespace.cs", - "match": "[_$[:alpha:]][_$[:alnum:]]*" - }, - { - "include": "#operator-assignment" - } - ] - } - ] - }, - "attribute-section": { - "begin": "(\\[)(assembly|module|field|event|method|param|property|return|type)*(\\:)*", - "beginCaptures": { - "1": { - "name": "punctuation.squarebracket.open.cs" - }, - "2": { - "name": "keyword.other.attribute-specifier.cs" - }, - 
"3": { - "name": "punctuation.separator.colon.cs" - } - }, - "end": "(\\])", - "endCaptures": { - "1": { - "name": "punctuation.squarebracket.close.cs" - } - }, - "patterns": [ - { - "include": "#comment" - }, - { - "include": "#attribute" - }, - { - "include": "#punctuation-comma" - } - ] - }, - "attribute": { - "patterns": [ - { - "include": "#type-name" - }, - { - "include": "#attribute-arguments" - } - ] - }, - "attribute-arguments": { - "begin": "(\\()", - "beginCaptures": { - "1": { - "name": "punctuation.parenthesis.open.cs" - } - }, - "end": "(\\))", - "endCaptures": { - "1": { - "name": "punctuation.parenthesis.close.cs" - } - }, - "patterns": [ - { - "include": "#attribute-named-argument" - }, - { - "include": "#expression" - }, - { - "include": "#punctuation-comma" - } - ] - }, - "attribute-named-argument": { - "begin": "([_$[:alpha:]][_$[:alnum:]]*)\\s*(?==)", - "beginCaptures": { - "1": { - "name": "entity.name.variable.property.cs" - } - }, - "end": "(?=(,|\\)))", - "patterns": [ - { - "include": "#operator-assignment" - }, - { - "include": "#expression" - } - ] - }, - "namespace-declaration": { - "begin": "\\b(namespace)\\s+", - "beginCaptures": { - "1": { - "name": "keyword.other.namespace.cs" - } - }, - "end": "(?<=\\})", - "patterns": [ - { - "include": "#comment" - }, - { - "name": "entity.name.type.namespace.cs", - "match": "[_$[:alpha:]][_$[:alnum:]]*" - }, - { - "include": "#punctuation-accessor" - }, - { - "begin": "\\{", - "beginCaptures": { - "0": { - "name": "punctuation.curlybrace.open.cs" - } - }, - "end": "\\}", - "endCaptures": { - "0": { - "name": "punctuation.curlybrace.close.cs" - } - }, - "patterns": [ - { - "include": "#declarations" - }, - { - "include": "#using-directive" - }, - { - "include": "#punctuation-semicolon" - } - ] - } - ] - }, - "class-declaration": { - "begin": "(?=(?:((new|public|protected|internal|private|abstract|sealed|static|partial)\\s+)*)(?:class)\\s+)", - "end": "(?<=\\})", - "patterns": [ - { - "include": 
"#comment" - }, - { - "name": "storage.modifier.cs", - "match": "\\b(new|public|protected|internal|private|abstract|sealed|static|partial)\\b" - }, - { - "begin": "(?=class)", - "end": "(?=\\{)", - "patterns": [ - { - "comment": "C# grammar: class identifier type-parameter-list[opt]", - "match": "(class)\\s+([_$[:alpha:]][_$[:alnum:]]*(\\s*<\\s*(?:[_$[:alpha:]][_$[:alnum:]]*\\s*,\\s*)*(?:[_$[:alpha:]][_$[:alnum:]]*)\\s*>)?)", - "captures": { - "1": { - "name": "keyword.other.class.cs" - }, - "2": { - "name": "entity.name.type.class.cs" - } - } - }, - { - "include": "#generic-constraints" - }, - { - "begin": ":", - "beginCaptures": { - "0": { - "name": "punctuation.separator.colon.cs" - } - }, - "end": "(?=\\{|where)", - "patterns": [ - { - "include": "#type" - }, - { - "include": "#punctuation-comma" - } - ] - } - ] - }, - { - "begin": "\\{", - "beginCaptures": { - "0": { - "name": "punctuation.curlybrace.open.cs" - } - }, - "end": "\\}", - "endCaptures": { - "0": { - "name": "punctuation.curlybrace.close.cs" - } - }, - "patterns": [ - { - "include": "#class-members" - } - ] - } - ] - }, - "delegate-declaration": { - "begin": "(?=(?(?:(?:new|public|protected|internal|private)\\s+)*)(?(?:\\b(?:delegate)))\\s+(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*<\\s*(?:(?:(?:in|out)\\s+)?[_$[:alpha:]][_$[:alnum:]]*)(?:,\\s*(?:(?:in|out)\\s+)?[_$[:alpha:]][_$[:alnum:]]*)*\\s*>\\s*)?))\\s*(?:\\())", - "end": "(?=;)", - "patterns": [ - { - "include": "#comment" - }, - { - "match": 
"(?(?:(?:new|public|protected|internal|private)\\s+)*)(?(?:\\b(?:delegate)))\\s+(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*<\\s*(?:(?:(?:in|out)\\s+)?[_$[:alpha:]][_$[:alnum:]]*)(?:,\\s*(?:(?:in|out)\\s+)?[_$[:alpha:]][_$[:alnum:]]*)*\\s*>\\s*)?))\\s*(?=\\()", - "captures": { - "1": { - "patterns": [ - { - "match": "\\b(new|public|protected|internal|private)\\b", - "captures": { - "1": { - "name": "storage.modifier.cs" - } - } - } - ] - }, - "2": { - "name": "keyword.other.delegate.cs" - }, - "3": { - "patterns": [ - { - "include": "#type" - } - ] - }, - "4": { - "name": "entity.name.type.delegate.cs" - } - } - }, - { - "include": "#parenthesized-parameter-list" - }, - { - "include": "#generic-constraints" - } - ] - }, - "enum-declaration": { - "begin": "(?=(?:((new|public|protected|internal|private)\\s+)*)(?:enum)\\s+)", - "end": "(?<=\\})", - "patterns": [ - { - "include": "#comment" - }, - { - "name": "storage.modifier.cs", - "match": "\\b(new|public|protected|internal|private)\\b" - }, - { - "begin": "(?=enum)", - "end": "(?=\\{)", - "patterns": [ - { - "comment": "C# grammar: enum identifier", - "match": "(enum)\\s+([_$[:alpha:]][_$[:alnum:]]*)", - "captures": { - "1": { - "name": "keyword.other.enum.cs" - }, - "2": { - "name": "entity.name.type.enum.cs" - } - } - }, - { - "begin": ":", - "beginCaptures": { - "0": { - "name": "punctuation.separator.colon.cs" - } - }, - "end": "(?=\\{)", - "patterns": [ - { - "include": "#type" - } - ] - } - ] - }, - { - "begin": "\\{", - "beginCaptures": { - "0": { - "name": "punctuation.curlybrace.open.cs" - } - }, - "end": "\\}", - "endCaptures": { - "0": { - 
"name": "punctuation.curlybrace.close.cs" - } - }, - "patterns": [ - { - "include": "#comment" - }, - { - "include": "#attribute-section" - }, - { - "include": "#punctuation-comma" - }, - { - "begin": "[_$[:alpha:]][_$[:alnum:]]*", - "beginCaptures": { - "0": { - "name": "variable.other.enummember.cs" - } - }, - "end": "(?=(,|\\}))", - "patterns": [ - { - "include": "#comment" - }, - { - "include": "#variable-initializer" - } - ] - } - ] - } - ] - }, - "interface-declaration": { - "begin": "(?=(?:((new|public|protected|internal|private|partial)\\s+)*)(?:interface)\\s+)", - "end": "(?<=\\})", - "patterns": [ - { - "include": "#comment" - }, - { - "name": "storage.modifier.cs", - "match": "\\b(new|public|protected|internal|private|partial)\\b" - }, - { - "begin": "(?=interface)", - "end": "(?=\\{)", - "patterns": [ - { - "comment": "C# grammar: interface identifier variant-type-parameter-list[opt]", - "match": "(interface)\\s+([_$[:alpha:]][_$[:alnum:]]*(\\s*<\\s*(?:((in|out)\\s+)?[_$[:alpha:]][_$[:alnum:]]*\\s*,\\s*)*(?:((in|out)\\s+)?[_$[:alpha:]][_$[:alnum:]]*)\\s*>)?)", - "captures": { - "1": { - "name": "keyword.other.interface.cs" - }, - "2": { - "name": "entity.name.type.interface.cs" - } - } - }, - { - "include": "#generic-constraints" - }, - { - "begin": ":", - "beginCaptures": { - "0": { - "name": "punctuation.separator.colon.cs" - } - }, - "end": "(?=\\{|where)", - "patterns": [ - { - "include": "#type" - }, - { - "include": "#punctuation-comma" - } - ] - } - ] - }, - { - "begin": "\\{", - "beginCaptures": { - "0": { - "name": "punctuation.curlybrace.open.cs" - } - }, - "end": "\\}", - "endCaptures": { - "0": { - "name": "punctuation.curlybrace.close.cs" - } - }, - "patterns": [ - { - "include": "#interface-members" - } - ] - } - ] - }, - "struct-declaration": { - "begin": "(?=(?:((new|public|protected|internal|private|partial)\\s+)*)(?:struct)\\s+)", - "end": "(?<=\\})", - "patterns": [ - { - "include": "#comment" - }, - { - "name": "storage.modifier.cs", 
- "match": "\\b(new|public|protected|internal|private|partial)\\b" - }, - { - "begin": "(?=struct)", - "end": "(?=\\{)", - "patterns": [ - { - "comment": "C# grammar: struct identifier type-parameter-list[opt]", - "match": "(struct)\\s+([_$[:alpha:]][_$[:alnum:]]*(\\s*<\\s*(?:[_$[:alpha:]][_$[:alnum:]]*\\s*,\\s*)*(?:[_$[:alpha:]][_$[:alnum:]]*)\\s*>)?)", - "captures": { - "1": { - "name": "keyword.other.struct.cs" - }, - "2": { - "name": "entity.name.type.struct.cs" - } - } - }, - { - "include": "#generic-constraints" - }, - { - "begin": ":", - "beginCaptures": { - "0": { - "name": "punctuation.separator.colon.cs" - } - }, - "end": "(?=\\{|where)", - "patterns": [ - { - "include": "#type" - }, - { - "include": "#punctuation-comma" - } - ] - } - ] - }, - { - "begin": "\\{", - "beginCaptures": { - "0": { - "name": "punctuation.curlybrace.open.cs" - } - }, - "end": "\\}", - "endCaptures": { - "0": { - "name": "punctuation.curlybrace.close.cs" - } - }, - "patterns": [ - { - "include": "#struct-members" - } - ] - } - ] - }, - "generic-constraints": { - "begin": "(where)\\s+(\\w+)\\s*(:)", - "beginCaptures": { - "1": { - "name": "keyword.other.where.cs" - }, - "2": { - "name": "storage.type.cs" - }, - "3": { - "name": "punctuation.separator.colon.cs" - } - }, - "end": "(?=\\{|where|;)", - "patterns": [ - { - "name": "keyword.other.class.cs", - "match": "\\bclass\\b" - }, - { - "name": "keyword.other.struct.cs", - "match": "\\bstruct\\b" - }, - { - "match": "(new)\\s*(\\()\\s*(\\))", - "captures": { - "1": { - "name": "keyword.other.new.cs" - }, - "2": { - "name": "punctuation.parenthesis.open.cs" - }, - "3": { - "name": "punctuation.parenthesis.close.cs" - } - } - }, - { - "include": "#type" - }, - { - "include": "#punctuation-comma" - }, - { - "include": "#generic-constraints" - } - ] - }, - "field-declaration": { - "begin": 
"(?=(?(?:(?:new|public|protected|internal|private|static|readonly|volatile|const)\\s+)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)\\s*(?!=>|==)(?:;|=))", - "end": "(?=;)", - "patterns": [ - { - "include": "#comment" - }, - { - "begin": "(?(?:(?:new|public|protected|internal|private|static|readonly|volatile|const)\\s+)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?[_$[:alpha:]][_$[:alnum:]]*)\\s*(?!=>|==)(?=;|=)", - "beginCaptures": { - "1": { - "patterns": [ - { - "match": "\\b(new|public|protected|internal|private|static|readonly|volatile|const)\\b", - "captures": { - "1": { - "name": "storage.modifier.cs" - } - } - } - ] - }, - "2": { - "patterns": [ - { - "include": "#type" - } - ] - }, - "3": { - "name": "entity.name.variable.field.cs" - } - }, - "end": "(?=;)", - "patterns": [ - { - "match": "[_$[:alpha:]][_$[:alnum:]]*", - "name": "entity.name.variable.field.cs" - }, - { - "include": "#punctuation-comma" - }, - { - "include": "#variable-initializer" - } - ] - }, - { - "include": "#variable-initializer" - } - ] - }, - "property-declaration": { - "begin": 
"(?!.*\\b(?:class|interface|struct|enum|event)\\b)(?=(?(?:\\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\b\\s*)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)(\\s*\\.\\s*))?(?[_$[:alpha:]][_$[:alnum:]]*)\\s*(?:\\{|=>|$))", - "end": "(?=\\}|;)", - "patterns": [ - { - "include": "#comment" - }, - { - "match": "(?(?:\\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\b\\s*)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)(\\s*\\.\\s*))?(?[_$[:alpha:]][_$[:alnum:]]*)\\s*(?=\\{|=>|$)", - 
"captures": { - "1": { - "patterns": [ - { - "match": "\\b(new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\b", - "captures": { - "1": { - "name": "storage.modifier.cs" - } - } - } - ] - }, - "2": { - "patterns": [ - { - "include": "#type" - } - ] - }, - "3": { - "patterns": [ - { - "include": "#type" - } - ] - }, - "4": { - "patterns": [ - { - "include": "#punctuation-accessor" - } - ] - }, - "5": { - "name": "entity.name.variable.property.cs" - } - } - }, - { - "include": "#property-accessors" - }, - { - "include": "#expression-body" - }, - { - "include": "#variable-initializer" - } - ] - }, - "indexer-declaration": { - "begin": "(?=(?(?:\\b(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\\b\\s*)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)(\\s*\\.\\s*))?(?this)\\s*(?:\\[))", - "end": "(?=\\}|;)", - "patterns": [ - { - "include": "#comment" - }, - { - "match": 
"(?(?:\\b(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\\b\\s*)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)(\\s*\\.\\s*))?(?this)\\s*(?=\\[)", - "captures": { - "1": { - "patterns": [ - { - "match": "\\b(new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\\b", - "captures": { - "1": { - "name": "storage.modifier.cs" - } - } - } - ] - }, - "2": { - "patterns": [ - { - "include": "#type" - } - ] - }, - "3": { - "patterns": [ - { - "include": "#type" - } - ] - }, - "4": { - "patterns": [ - { - "include": "#punctuation-accessor" - } - ] - }, - "5": { - "name": "keyword.other.this.cs" - } - } - }, - { - "begin": "(?=\\[)", - "end": "(?=\\])", - "patterns": [ - { - "include": "#bracketed-parameter-list" - } - ] - }, - { - "include": "#property-accessors" - }, - { - "include": "#expression-body" - }, - { - "include": "#variable-initializer" - } - ] - }, - "event-declaration": { - "begin": 
"(?=(?(?:\\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\b\\s*)*)\\s*\\b(?event)\\b\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)(\\s*\\.\\s*))?(?[_$[:alpha:]][_$[:alnum:]]*(?:\\s*,\\s*[_$[:alpha:]][_$[:alnum:]]*)*)\\s*(?:\\{|;|$))", - "end": "(?=\\}|;)", - "patterns": [ - { - "include": "#comment" - }, - { - "match": 
"(?(?:\\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\b\\s*)*)\\s*\\b(?event)\\b\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)(\\s*\\.\\s*))?(?[_$[:alpha:]][_$[:alnum:]]*(?:\\s*,\\s*[_$[:alpha:]][_$[:alnum:]]*)*)\\s*(?=\\{|;|$)", - "captures": { - "1": { - "patterns": [ - { - "match": "\\b(new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\\b", - "captures": { - "1": { - "name": "storage.modifier.cs" - } - } - } - ] - }, - "2": { - "name": "keyword.other.event.cs" - }, - "3": { - "patterns": [ - { - "include": "#type" - } - ] - }, - "4": { - "patterns": [ - { - "include": "#type" - } - ] - }, - "5": { - "patterns": [ - { - "include": "#punctuation-accessor" - } - ] - }, - "6": { - "patterns": [ - { - "match": "[_$[:alpha:]][_$[:alnum:]]*", - "name": "entity.name.variable.event.cs" - }, - { - "include": "#punctuation-comma" - } - ] - } - } - }, - { - "include": "#event-accessors" - }, - { - "include": "#punctuation-comma" - } - ] - }, - "property-accessors": { - "begin": "\\{", - "beginCaptures": { - "0": { - "name": "punctuation.curlybrace.open.cs" - } - }, - "end": "\\}", - "endCaptures": { - "0": { - "name": "punctuation.curlybrace.close.cs" - } - }, - "patterns": [ - { - "match": 
"\\b(private|protected|internal)\\b", - "name": "storage.modifier.cs" - }, - { - "match": "\\b(get)\\b", - "name": "keyword.other.get.cs" - }, - { - "match": "\\b(set)\\b", - "name": "keyword.other.set.cs" - }, - { - "include": "#block" - }, - { - "include": "#punctuation-semicolon" - } - ] - }, - "event-accessors": { - "begin": "\\{", - "beginCaptures": { - "0": { - "name": "punctuation.curlybrace.open.cs" - } - }, - "end": "\\}", - "endCaptures": { - "0": { - "name": "punctuation.curlybrace.close.cs" - } - }, - "patterns": [ - { - "match": "\\b(add)\\b", - "name": "keyword.other.add.cs" - }, - { - "match": "\\b(remove)\\b", - "name": "keyword.other.remove.cs" - }, - { - "include": "#block" - }, - { - "include": "#punctuation-semicolon" - } - ] - }, - "method-declaration": { - "begin": "(?=(?(?:\\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\\b\\s*)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)(\\s*\\.\\s*))?(?(?[_$[:alpha:]][_$[:alnum:]]*)(?:\\s*<\\s*\\g(?:\\s*,\\s*\\g)*\\s*>\\s*)?)\\s*(?:\\())", - "end": "(?=\\}|;)", - "patterns": [ - { - "include": "#comment" - }, - { - "match": 
"(?(?:\\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\\b\\s*)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)(\\s*\\.\\s*))?(?(?[_$[:alpha:]][_$[:alnum:]]*)(?:\\s*<\\s*\\g(?:\\s*,\\s*\\g)*\\s*>\\s*)?)\\s*(?=\\()", - "captures": { - "1": { - "patterns": [ - { - "match": "\\b(new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\\b", - "captures": { - "1": { - "name": "storage.modifier.cs" - } - } - } - ] - }, - "2": { - "patterns": [ - { - "include": "#type" - } - ] - }, - "3": { - "patterns": [ - { - "include": "#type" - } - ] - }, - "4": { - "patterns": [ - { - "include": "#punctuation-accessor" - } - ] - }, - "5": { - "name": "entity.name.function.cs" - } - } - }, - { - "include": "#parenthesized-parameter-list" - }, - { - "include": "#generic-constraints" - }, - { - "include": "#expression-body" - }, - { - "include": "#block" - } - ] - }, - "constructor-declaration": { - "begin": "(?=(?:(?(?:(?:public|protected|internal|private|extern|static)\\s+)+)\\s*(?:[_$[:alpha:]][_$[:alnum:]]*)|(?:[_$[:alpha:]][_$[:alnum:]]*))\\s*(?:\\())", - "end": "(?=\\}|;)", - "patterns": [ - { - "include": "#comment" - }, - { - "match": 
"(?(?:(?:public|protected|internal|private|extern|static)\\s+)+)\\s*(?[_$[:alpha:]][_$[:alnum:]]*)\\s*(?=\\()", - "captures": { - "1": { - "patterns": [ - { - "match": "\\b(public|protected|internal|private|extern|static)\\b", - "captures": { - "1": { - "name": "storage.modifier.cs" - } - } - } - ] - }, - "2": { - "name": "entity.name.function.cs" - } - } - }, - { - "match": "([_$[:alpha:]][_$[:alnum:]]*)\\s*(?=\\()", - "captures": { - "1": { - "name": "entity.name.function.cs" - } - } - }, - { - "include": "#parenthesized-parameter-list" - }, - { - "include": "#expression-body" - }, - { - "include": "#block" - } - ] - }, - "destructor-declaration": { - "begin": "(?=~(?:[_$[:alpha:]][_$[:alnum:]]*)\\s*(?:\\())", - "end": "(?=\\}|;)", - "patterns": [ - { - "include": "#comment" - }, - { - "match": "(~)([_$[:alpha:]][_$[:alnum:]]*)\\s*(?=\\()", - "captures": { - "1": { - "name": "punctuation.tilde.cs" - }, - "2": { - "name": "entity.name.function.cs" - } - } - }, - { - "include": "#parenthesized-parameter-list" - }, - { - "include": "#expression-body" - }, - { - "include": "#block" - } - ] - }, - "operator-declaration": { - "begin": "(?=(?(?:(?:public|static|extern)\\s+)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s*(?(?:\\b(?:operator)))\\s*(?(?:\\+|-|\\*|/|%|&|\\||\\^|\\<\\<|\\>\\>|==|!=|\\>|\\<|\\>=|\\<=|!|~|\\+\\+|--|true|false))\\s*(?:\\())", - "end": "(?=\\}|;)", - "patterns": [ - { - "include": "#comment" - }, - { - "match": 
"(?(?:(?:public|static|extern)\\s+)*)\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s*(?(?:\\b(?:operator)))\\s*(?(?:\\+|-|\\*|/|%|&|\\||\\^|\\<\\<|\\>\\>|==|!=|\\>|\\<|\\>=|\\<=|!|~|\\+\\+|--|true|false))\\s*(?=\\()", - "captures": { - "1": { - "patterns": [ - { - "match": "\\b(public|static|extern)\\b", - "captures": { - "1": { - "name": "storage.modifier.cs" - } - } - } - ] - }, - "2": { - "patterns": [ - { - "include": "#type" - } - ] - }, - "3": { - "name": "keyword.other.operator.cs" - }, - "4": { - "name": "entity.name.function.cs" - } - } - }, - { - "include": "#parenthesized-parameter-list" - }, - { - "include": "#expression-body" - }, - { - "include": "#block" - } - ] - }, - "conversion-operator-declaration": { - "begin": "(?=(?(?:(?:public|static|extern)\\s+)*)\\s*(?(?:\\b(?:explicit|implicit)))\\s*(?(?:\\b(?:operator)))\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s*(?:\\())", - "end": "(?=\\}|;)", - "patterns": [ - { - "include": "#comment" - }, - { - "match": 
"(?(?:(?:public|static|extern)\\s+)*)\\s*(?(?:\\b(?:explicit|implicit)))\\s*(?(?:\\b(?:operator)))\\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\\s*\\:\\:\\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\s*\\.\\s*\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)\\s*(?=\\()", - "captures": { - "1": { - "patterns": [ - { - "match": "\\b(public|static|extern)\\b", - "captures": { - "1": { - "name": "storage.modifier.cs" - } - } - } - ] - }, - "2": { - "patterns": [ - { - "match": "\\b(explicit)\\b", - "captures": { - "1": { - "name": "keyword.other.explicit.cs" - } - } - }, - { - "match": "\\b(implicit)\\b", - "captures": { - "1": { - "name": "keyword.other.implicit.cs" - } - } - } - ] - }, - "3": { - "name": "keyword.other.operator.cs" - }, - "4": { - "patterns": [ - { - "include": "#type" - } - ] - } - } - }, - { - "include": "#parenthesized-parameter-list" - }, - { - "include": "#expression-body" - }, - { - "include": "#block" - } - ] - }, - "block": { - "begin": "\\{", - "beginCaptures": { - "0": { - "name": "punctuation.curlybrace.open.cs" - } - }, - "end": "\\}", - "endCaptures": { - "0": { - "name": "punctuation.curlybrace.close.cs" - } - }, - "patterns": [ - { - "include": "#statement" - } - ] - }, - "variable-initializer": { - "begin": "(?)", - "beginCaptures": { - "1": { - "name": "keyword.operator.assignment.cs" - } - }, - "end": "(?=[,\\);}])", - "patterns": [ - { - "include": "#expression" - } - ] - }, - "expression-body": { - "begin": "(=>)", - "beginCaptures": { - "1": { - "name": "keyword.operator.arrow.cs" - } - }, - "end": "(?=[,\\);}])", - "patterns": [ - { - "include": "#expression" - } - ] - }, - "control-statement": { - "patterns": [ - { - "name": "keyword.control.loop.cs", - "match": "(?>" - }, - { - "name": 
"keyword.operator.comparison.cs", - "match": "==|!=" - }, - { - "name": "keyword.operator.relational.cs", - "match": "<=|>=|<|>" - }, - { - "name": "keyword.operator.logical.cs", - "match": "\\!|&&|\\|\\|" - }, - { - "name": "keyword.operator.bitwise.cs", - "match": "\\&|~|\\^|\\|" - }, - { - "name": "keyword.operator.assignment.cs", - "match": "\\=" - }, - { - "name": "keyword.operator.decrement.cs", - "match": "--" - }, - { - "name": "keyword.operator.increment.cs", - "match": "\\+\\+" - }, - { - "name": "keyword.operator.arithmetic.cs", - "match": "%|\\*|/|-|\\+" - } - ] - }, - "parenthesized-expression": { - "begin": "\\(", - "beginCaptures": { - "0": { - "name": "punctuation.parenthesis.open.cs" - } - }, - "end": "\\)", - "endCaptures": { - "0": { - "name": "punctuation.parenthesis.close.cs" - } - }, - "patterns": [ - { - "include": "#expression" - } - ] - }, - "identifier": { - "name": "variable.other.readwrite.cs", - "match": "[_$[:alpha:]][_$[:alnum:]]*" - }, - "object-creation-expression": { - "begin": "(new)\\s+([_$[:alnum:]\\.\\*\\[\\]<>,\\s]+?)\\s*(?=\\()", - "beginCaptures": { - "1": { - "name": "keyword.other.new.cs" - }, - "2": { - "patterns": [ - { - "include": "#type" - } - ] - } - }, - "end": "(?<=\\))", - "patterns": [ - { - "include": "#parenthesized-parameter-list" - } - ] - }, - "bracketed-parameter-list": { - "begin": "(?=(\\[))", - "beginCaptures": { - "1": { - "name": "punctuation.squarebracket.open.cs" - } - }, - "end": "(?=(\\]))", - "endCaptures": { - "1": { - "name": "punctuation.squarebracket.close.cs" - } - }, - "patterns": [ - { - "comment": "Note: We have to be careful here to skip the [. 
Otherwise, attributes will conflict.", - "begin": "(?<=\\[)", - "end": "(?=\\])", - "patterns": [ - { - "include": "#comment" - }, - { - "include": "#attribute-section" - }, - { - "match": "\\b(ref|params|out)\\b", - "name": "storage.modifier.cs" - }, - { - "comment": "parameter name", - "match": "\\s+([_$[:alpha:]][_$[:alnum:]]*)\\s*(?=[,\\]])", - "captures": { - "1": { - "name": "variable.parameter.cs" - } - } - }, - { - "include": "#variable-initializer" - }, - { - "include": "#type" - }, - { - "include": "#punctuation-comma" - } - ] - } - ] - }, - "parenthesized-parameter-list": { - "begin": "(\\()", - "beginCaptures": { - "0": { - "name": "punctuation.parenthesis.open.cs" - } - }, - "end": "(\\))", - "endCaptures": { - "0": { - "name": "punctuation.parenthesis.close.cs" - } - }, - "patterns": [ - { - "include": "#comment" - }, - { - "include": "#attribute-section" - }, - { - "match": "\\b(ref|params|out)\\b", - "name": "storage.modifier.cs" - }, - { - "comment": "parameter name", - "match": "\\s+([_$[:alpha:]][_$[:alnum:]]*)\\s*(?=[,)])", - "captures": { - "1": { - "name": "variable.parameter.cs" - } - } - }, - { - "include": "#variable-initializer" - }, - { - "include": "#type" - }, - { - "include": "#punctuation-comma" - } - ] - }, - "type": { - "name": "meta.type.cs", - "patterns": [ - { - "include": "#comment" - }, - { - "include": "#tuple-type" - }, - { - "include": "#type-builtin" - }, - { - "include": "#type-name" - }, - { - "include": "#type-parameters" - }, - { - "include": "#type-array-suffix" - } - ] - }, - "tuple-type": { - "patterns": [ - { - "begin": "\\(", - "beginCaptures": { - "0": { - "name": "punctuation.parenthesis.open.cs" - } - }, - "end": "\\)", - "endCaptures": { - "0": { - "name": "punctuation.parenthesis.close.cs" - } - }, - "patterns": [ - { - "match": 
"(?(?:(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\\s*\\.\\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\\s*<\\s*(?:\\g)(?:\\s*,\\s*\\g)*\\s*>\\s*)?(?:(?:\\*)*)?(?:(?:\\[,*\\])*)?(?:\\.\\g)*)|(?:\\s*\\(\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\\s*,\\s*(?:\\g)(?:\\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\\s*\\)\\s*))(?:(?:\\[,*\\])*)?)(?:\\s+(?[_$[:alpha:]][_$[:alnum:]]*))?", - "captures": { - "1": { - "patterns": [ - { - "include": "#type" - } - ] - }, - "2": { - "name": "entity.name.variable.tuple.cs" - } - } - }, - { - "include": "#punctuation-comma" - } - ] - } - ] - }, - "type-builtin": { - "patterns": [ - { - "match": "\\b(bool|byte|char|decimal|double|float|int|long|object|sbyte|short|string|uint|ulong|ushort)\\b", - "name": "storage.type.cs" - } - ] - }, - "type-name": { - "patterns": [ - { - "match": "([_$[:alpha:]][_$[:alnum:]]*)\\s*(\\:\\:)", - "captures": { - "1": { - "name": "entity.name.type.alias.cs" - }, - "2": { - "name": "punctuation.separator.coloncolon.cs" - } - } - }, - { - "match": "([_$[:alpha:]][_$[:alnum:]]*)\\s*(\\.)", - "captures": { - "1": { - "name": "storage.type.cs" - }, - "2": { - "name": "punctuation.accessor.cs" - } - } - }, - { - "match": "(\\.)\\s*([_$[:alpha:]][_$[:alnum:]]*)", - "captures": { - "1": { - "name": "punctuation.accessor.cs" - }, - "2": { - "name": "storage.type.cs" - } - } - }, - { - "name": "storage.type.cs", - "match": "[_$[:alpha:]][_$[:alnum:]]*" - } - ] - }, - "type-parameters": { - "name": "meta.type.parameters.cs", - "begin": "(<)", - "beginCaptures": { - "1": { - "name": "punctuation.definition.typeparameters.begin.cs" - } - }, - "end": "(>)", - "endCaptures": { - "1": { - "name": "punctuation.definition.typeparameters.end.cs" - } - }, - "patterns": [ - { - "include": "#comment" - }, - { - "include": "#type" - }, - { - "include": "#punctuation-comma" - } - ] - }, - "type-array-suffix": { - "begin": "(\\[)", - "beginCaptures": { - "1": { - "name": "punctuation.squarebracket.open.cs" - } - }, - "end": "(\\])", - 
"endCaptures": { - "1": { - "name": "punctuation.squarebracket.close.cs" - } - }, - "patterns": [ - { - "include": "#punctuation-comma" - } - ] - }, - "operator-assignment": { - "name": "keyword.operator.assignment.cs", - "match": "(? + + + + name + C# + scopeName + source.cs + fileTypes + + cs + + uuid + f7de61e2-bdde-4e2a-a139-8221b179584e + patterns + + + include + #comment + + + include + #directives + + + include + #declarations + + + repository + + directives + + patterns + + + include + #extern-alias-directive + + + include + #using-directive + + + include + #attribute-section + + + include + #punctuation-semicolon + + + + declarations + + patterns + + + include + #namespace-declaration + + + include + #type-declarations + + + include + #punctuation-semicolon + + + + type-declarations + + patterns + + + include + #class-declaration + + + include + #delegate-declaration + + + include + #enum-declaration + + + include + #interface-declaration + + + include + #struct-declaration + + + include + #punctuation-semicolon + + + + class-members + + patterns + + + include + #type-declarations + + + include + #event-declaration + + + include + #property-declaration + + + include + #indexer-declaration + + + include + #field-declaration + + + include + #variable-initializer + + + include + #method-declaration + + + include + #constructor-declaration + + + include + #destructor-declaration + + + include + #operator-declaration + + + include + #conversion-operator-declaration + + + include + #punctuation-semicolon + + + + struct-members + + patterns + + + include + #type-declarations + + + include + #event-declaration + + + include + #property-declaration + + + include + #indexer-declaration + + + include + #field-declaration + + + include + #variable-initializer + + + include + #method-declaration + + + include + #constructor-declaration + + + include + #destructor-declaration + + + include + #operator-declaration + + + include + #conversion-operator-declaration + + + 
include + #punctuation-semicolon + + + + interface-members + + patterns + + + include + #event-declaration + + + include + #property-declaration + + + include + #indexer-declaration + + + include + #method-declaration + + + include + #punctuation-semicolon + + + + statement + + patterns + + + include + #comment + + + include + #control-statement + + + include + #expression + + + include + #block + + + include + #punctuation-semicolon + + + + expression + + patterns + + + include + #interpolated-string + + + include + #verbatim-interpolated-string + + + include + #literal + + + include + #expression-operators + + + include + #object-creation-expression + + + include + #parenthesized-expression + + + include + #identifier + + + + extern-alias-directive + + begin + \s*(extern)\b\s*(alias)\b\s*([_$[:alpha:]][_$[:alnum:]]*) + beginCaptures + + 1 + + name + keyword.other.extern.cs + + 2 + + name + keyword.other.alias.cs + + 3 + + name + variable.other.alias.cs + + + end + (?=;) + + using-directive + + patterns + + + begin + \b(using)\b\s+(static)\s+ + beginCaptures + + 1 + + name + keyword.other.using.cs + + 2 + + name + keyword.other.static.cs + + + end + (?=;) + patterns + + + include + #type + + + + + begin + \b(using)\s+(?=([_$[:alpha:]][_$[:alnum:]]*)\s*=) + beginCaptures + + 1 + + name + keyword.other.using.cs + + 2 + + name + entity.name.type.alias.cs + + + end + (?=;) + patterns + + + include + #comment + + + include + #type + + + include + #operator-assignment + + + + + begin + \b(using)\s* + beginCaptures + + 1 + + name + keyword.other.using.cs + + + end + (?=;) + patterns + + + include + #comment + + + name + entity.name.type.namespace.cs + match + [_$[:alpha:]][_$[:alnum:]]* + + + include + #operator-assignment + + + + + + attribute-section + + begin + (\[)(assembly|module|field|event|method|param|property|return|type)?(\:)? 
+ beginCaptures + + 1 + + name + punctuation.squarebracket.open.cs + + 2 + + name + keyword.other.attribute-specifier.cs + + 3 + + name + punctuation.separator.colon.cs + + + end + (\]) + endCaptures + + 1 + + name + punctuation.squarebracket.close.cs + + + patterns + + + include + #comment + + + include + #attribute + + + include + #punctuation-comma + + + + attribute + + patterns + + + include + #type-name + + + include + #attribute-arguments + + + + attribute-arguments + + begin + (\() + beginCaptures + + 1 + + name + punctuation.parenthesis.open.cs + + + end + (\)) + endCaptures + + 1 + + name + punctuation.parenthesis.close.cs + + + patterns + + + include + #attribute-named-argument + + + include + #expression + + + include + #punctuation-comma + + + + attribute-named-argument + + begin + ([_$[:alpha:]][_$[:alnum:]]*)\s*(?==) + beginCaptures + + 1 + + name + entity.name.variable.property.cs + + + end + (?=(,|\))) + patterns + + + include + #operator-assignment + + + include + #expression + + + + namespace-declaration + + begin + \b(namespace)\s+ + beginCaptures + + 1 + + name + keyword.other.namespace.cs + + + end + (?<=\}) + patterns + + + include + #comment + + + name + entity.name.type.namespace.cs + match + [_$[:alpha:]][_$[:alnum:]]* + + + include + #punctuation-accessor + + + begin + \{ + beginCaptures + + 0 + + name + punctuation.curlybrace.open.cs + + + end + \} + endCaptures + + 0 + + name + punctuation.curlybrace.close.cs + + + patterns + + + include + #declarations + + + include + #using-directive + + + include + #punctuation-semicolon + + + + + + class-declaration + + begin + (?=(?:((new|public|protected|internal|private|abstract|sealed|static|partial)\s+)*)(?:class)\s+) + end + (?<=\}) + patterns + + + include + #comment + + + name + storage.modifier.cs + match + \b(new|public|protected|internal|private|abstract|sealed|static|partial)\b + + + begin + (?=class) + end + (?=\{) + patterns + + + match + 
(class)\s+([_$[:alpha:]][_$[:alnum:]]*(\s*<\s*(?:[_$[:alpha:]][_$[:alnum:]]*\s*,\s*)*(?:[_$[:alpha:]][_$[:alnum:]]*)\s*>)?) + captures + + 1 + + name + keyword.other.class.cs + + 2 + + name + entity.name.type.class.cs + + + + + include + #generic-constraints + + + begin + : + beginCaptures + + 0 + + name + punctuation.separator.colon.cs + + + end + (?=\{|where) + patterns + + + include + #type + + + include + #punctuation-comma + + + + + + + begin + \{ + beginCaptures + + 0 + + name + punctuation.curlybrace.open.cs + + + end + \} + endCaptures + + 0 + + name + punctuation.curlybrace.close.cs + + + patterns + + + include + #class-members + + + + + + delegate-declaration + + begin + (?x) +(?<storage-modifiers>(?:\b(?:new|public|protected|internal|private)\b\s+)*) +(?<delegate-keyword>(?:\b(?:delegate)\b))\s+ +(?<type-name>(?: + (?:(?<identifier>[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? + (?: + (?:\g<identifier>(?:\s*\.\s*\g<identifier>)*) + (?:\s*<\s*(?:\g<type-name>)(?:\s*,\s*\g<type-name>)*\s*>\s*)? + (?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g<type-name>)* + )| + (?:\s*\(\s*(?:\g<type-name>)(?:\s+\g<identifier>)?(?:\s*,\s*(?:\g<type-name>)(?:\s+\g<identifier>)?)*\s*\)\s*)) + (?:(?:\[,*\])*)? +)\s+ +(?<delegate-name> + (?: + \g<identifier> + (?:\s*<\s*(?:(?:(?:in|out)\s+)?\g<identifier>)(?:,\s*(?:(?:in|out)\s+)?\g<identifier>)*\s*>\s*)? 
+ ) +)\s* +(?=\() + beginCaptures + + 1 + + patterns + + + match + \b(new|public|protected|internal|private)\b + captures + + 1 + + name + storage.modifier.cs + + + + + + 2 + + name + keyword.other.delegate.cs + + 3 + + patterns + + + include + #type + + + + 5 + + name + entity.name.type.delegate.cs + + + end + (?=;) + patterns + + + include + #comment + + + include + #parenthesized-parameter-list + + + include + #generic-constraints + + + + enum-declaration + + begin + (?=(?:((new|public|protected|internal|private)\s+)*)(?:enum)\s+) + end + (?<=\}) + patterns + + + include + #comment + + + name + storage.modifier.cs + match + \b(new|public|protected|internal|private)\b + + + begin + (?=enum) + end + (?=\{) + patterns + + + match + (enum)\s+([_$[:alpha:]][_$[:alnum:]]*) + captures + + 1 + + name + keyword.other.enum.cs + + 2 + + name + entity.name.type.enum.cs + + + + + begin + : + beginCaptures + + 0 + + name + punctuation.separator.colon.cs + + + end + (?=\{) + patterns + + + include + #type + + + + + + + begin + \{ + beginCaptures + + 0 + + name + punctuation.curlybrace.open.cs + + + end + \} + endCaptures + + 0 + + name + punctuation.curlybrace.close.cs + + + patterns + + + include + #comment + + + include + #attribute-section + + + include + #punctuation-comma + + + begin + [_$[:alpha:]][_$[:alnum:]]* + beginCaptures + + 0 + + name + variable.other.enummember.cs + + + end + (?=(,|\})) + patterns + + + include + #comment + + + include + #variable-initializer + + + + + + + + interface-declaration + + begin + (?=(?:((new|public|protected|internal|private|partial)\s+)*)(?:interface)\s+) + end + (?<=\}) + patterns + + + include + #comment + + + name + storage.modifier.cs + match + \b(new|public|protected|internal|private|partial)\b + + + begin + (?=interface) + end + (?=\{) + patterns + + + match + (?x) +(interface)\s+ +( + (?<identifier>[_$[:alpha:]][_$[:alnum:]]*) + (\s*<\s*(?:(?:(?:in|out)\s+)?\g<identifier>\s*,\s*)*(?:(?:in|out)\s+)?\g<identifier>\s*>)? 
+) + captures + + 1 + + name + keyword.other.interface.cs + + 2 + + name + entity.name.type.interface.cs + + + + + include + #generic-constraints + + + begin + : + beginCaptures + + 0 + + name + punctuation.separator.colon.cs + + + end + (?=\{|where) + patterns + + + include + #type + + + include + #punctuation-comma + + + + + + + begin + \{ + beginCaptures + + 0 + + name + punctuation.curlybrace.open.cs + + + end + \} + endCaptures + + 0 + + name + punctuation.curlybrace.close.cs + + + patterns + + + include + #interface-members + + + + + + struct-declaration + + begin + (?=(?:((new|public|protected|internal|private|partial)\s+)*)(?:struct)\s+) + end + (?<=\}) + patterns + + + include + #comment + + + name + storage.modifier.cs + match + \b(new|public|protected|internal|private|partial)\b + + + begin + (?=struct) + end + (?=\{) + patterns + + + match + (?x) +(struct)\s+ +( + (?<identifier>[_$[:alpha:]][_$[:alnum:]]*) + (\s*<\s*(?:\g<identifier>\s*,\s*)*\g<identifier>\s*>)? +) + captures + + 1 + + name + keyword.other.struct.cs + + 2 + + name + entity.name.type.struct.cs + + + + + include + #generic-constraints + + + begin + : + beginCaptures + + 0 + + name + punctuation.separator.colon.cs + + + end + (?=\{|where) + patterns + + + include + #type + + + include + #punctuation-comma + + + + + + + begin + \{ + beginCaptures + + 0 + + name + punctuation.curlybrace.open.cs + + + end + \} + endCaptures + + 0 + + name + punctuation.curlybrace.close.cs + + + patterns + + + include + #struct-members + + + + + + generic-constraints + + begin + (where)\s+([_$[:alpha:]][_$[:alnum:]]*)\s*(:) + beginCaptures + + 1 + + name + keyword.other.where.cs + + 2 + + name + storage.type.cs + + 3 + + name + punctuation.separator.colon.cs + + + end + (?=\{|where|;) + patterns + + + name + keyword.other.class.cs + match + \bclass\b + + + name + keyword.other.struct.cs + match + \bstruct\b + + + match + (new)\s*(\()\s*(\)) + captures + + 1 + + name + keyword.other.new.cs + + 2 + + name + 
punctuation.parenthesis.open.cs + + 3 + + name + punctuation.parenthesis.close.cs + + + + + include + #type + + + include + #punctuation-comma + + + include + #generic-constraints + + + + field-declaration + + begin + (?x) +(?<storage-modifiers>(?:\b(?:new|public|protected|internal|private|static|readonly|volatile|const)\b\s+)*)\s* +(?<type-name>(?: + (?:(?<identifier>[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? + (?: + (?:\g<identifier>(?:\s*\.\s*\g<identifier>)*) + (?:\s*<\s*(?:\g<type-name>)(?:\s*,\s*\g<type-name>)*\s*>\s*)? + (?:(?:\*)*)? + (?:(?:\[,*\])*)? + (?:\s*\.\s*\g<type-name>)* + )| + (?:\s*\(\s*(?:\g<type-name>)(?:\s+\g<identifier>)?(?:\s*,\s*(?:\g<type-name>)(?:\s+\g<identifier>)?)*\s*\)\s*)) + (?:(?:\[,*\])*)? +)\s+ +(?<first-field-name>\g<identifier>)\s* +(?!=>|==)(?=;|=) + beginCaptures + + 1 + + patterns + + + match + \b(new|public|protected|internal|private|static|readonly|volatile|const)\b + captures + + 1 + + name + storage.modifier.cs + + + + + + 2 + + patterns + + + include + #type + + + + 3 + + name + entity.name.variable.field.cs + + + end + (?=;) + patterns + + + name + entity.name.variable.field.cs + match + [_$[:alpha:]][_$[:alnum:]]* + + + include + #punctuation-comma + + + include + #comment + + + include + #variable-initializer + + + + property-declaration + + begin + (?x) +(?!.*\b(?:class|interface|struct|enum|event)\b) +(?<storage-modifiers>(?:\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\b\s*)*)\s* +(?<return-type> + (?<type-name>(?: + (?:(?<identifier>[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? + (?: + (?:\g<identifier>(?:\s*\.\s*\g<identifier>)*) + (?:\s*<\s*(?:\g<type-name>)(?:\s*,\s*\g<type-name>)*\s*>\s*)? + (?:(?:\*)*)? + (?:(?:\[,*\])*)? + (?:\s*\.\s*\g<type-name>)* + )| + (?:\s*\(\s*(?:\g<type-name>)(?:\s+\g<identifier>)?(?:\s*,\s*(?:\g<type-name>)(?:\s+\g<identifier>)?)*\s*\)\s*)) + (?:(?:\[,*\])*)? + )\s+ +) +(?<interface-name>\g<type-name>\s*\.\s*)? 
+(?<property-name>\g<identifier>)\s* +(?=\{|=>|$) + beginCaptures + + 1 + + patterns + + + match + \b(new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\b + captures + + 1 + + name + storage.modifier.cs + + + + + + 2 + + patterns + + + include + #type + + + + 5 + + patterns + + + include + #type + + + include + #punctuation-accessor + + + + 6 + + name + entity.name.variable.property.cs + + + end + (?=\}|;) + patterns + + + include + #comment + + + include + #property-accessors + + + include + #expression-body + + + include + #variable-initializer + + + + indexer-declaration + + begin + (?x) +(?<storage-modifiers>(?:\b(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\b\s*)*)\s* +(?<return-type> + (?<type-name>(?: + (?:(?<identifier>[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? + (?: + (?:\g<identifier>(?:\s*\.\s*\g<identifier>)*) + (?:\s*<\s*(?:\g<type-name>)(?:\s*,\s*\g<type-name>)*\s*>\s*)? + (?:(?:\*)*)? + (?:(?:\[,*\])*)? + (?:\s*\.\s*\g<type-name>)* + )| + (?:\s*\(\s*(?:\g<type-name>)(?:\s+\g<identifier>)?(?:\s*,\s*(?:\g<type-name>)(?:\s+\g<identifier>)?)*\s*\)\s*)) + (?:(?:\[,*\])*)? + )\s+ +) +(?<interface-name>\g<type-name>\s*\.\s*)? 
+(?<indexer-name>this)\s* +(?=\[) + beginCaptures + + 1 + + patterns + + + match + \b(new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\b + captures + + 1 + + name + storage.modifier.cs + + + + + + 2 + + patterns + + + include + #type + + + + 5 + + patterns + + + include + #type + + + include + #punctuation-accessor + + + + 6 + + name + keyword.other.this.cs + + + end + (?=\}|;) + patterns + + + include + #comment + + + include + #bracketed-parameter-list + + + include + #property-accessors + + + include + #expression-body + + + include + #variable-initializer + + + + event-declaration + + begin + (?x) +(?<storage-modifiers>(?:\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\b\s*)*)\s* +\b(?<event-keyword>event)\b\s* +(?<return-type> + (?<type-name>(?: + (?:(?<identifier>[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? + (?: + (?:\g<identifier>(?:\s*\.\s*\g<identifier>)*) + (?:\s*<\s*(?:\g<type-name>)(?:\s*,\s*\g<type-name>)*\s*>\s*)? + (?:(?:\*)*)? + (?:(?:\[,*\])*)? + (?:\s*\.\s*\g<type-name>)* + )| + (?:\s*\(\s*(?:\g<type-name>)(?:\s+\g<identifier>)?(?:\s*,\s*(?:\g<type-name>)(?:\s+\g<identifier>)?)*\s*\)\s*)) + (?:(?:\[,*\])*)? + )\s+ +) +(?<interface-name>\g<type-name>\s*\.\s*)? 
+(?<event-names>\g<identifier>(?:\s*,\s*\g<identifier>)*)\s* +(?=\{|;|$) + beginCaptures + + 1 + + patterns + + + match + \b(new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\b + captures + + 1 + + name + storage.modifier.cs + + + + + + 2 + + name + keyword.other.event.cs + + 3 + + patterns + + + include + #type + + + + 6 + + patterns + + + include + #type + + + include + #punctuation-accessor + + + + 7 + + patterns + + + name + entity.name.variable.event.cs + match + [_$[:alpha:]][_$[:alnum:]]* + + + include + #punctuation-comma + + + + + end + (?=\}|;) + patterns + + + include + #comment + + + include + #event-accessors + + + include + #punctuation-comma + + + + property-accessors + + begin + \{ + beginCaptures + + 0 + + name + punctuation.curlybrace.open.cs + + + end + \} + endCaptures + + 0 + + name + punctuation.curlybrace.close.cs + + + patterns + + + name + storage.modifier.cs + match + \b(private|protected|internal)\b + + + name + keyword.other.get.cs + match + \b(get)\b + + + name + keyword.other.set.cs + match + \b(set)\b + + + include + #block + + + include + #punctuation-semicolon + + + + event-accessors + + begin + \{ + beginCaptures + + 0 + + name + punctuation.curlybrace.open.cs + + + end + \} + endCaptures + + 0 + + name + punctuation.curlybrace.close.cs + + + patterns + + + name + keyword.other.add.cs + match + \b(add)\b + + + name + keyword.other.remove.cs + match + \b(remove)\b + + + include + #block + + + include + #punctuation-semicolon + + + + method-declaration + + begin + (?x) +(?<storage-modifiers>(?:\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\b\s*)*)\s* +(?<return-type> + (?<type-name>(?: + (?:(?<identifier>[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? + (?: + (?:\g<identifier>(?:\s*\.\s*\g<identifier>)*) + (?:\s*<\s*(?:\g<type-name>)(?:\s*,\s*\g<type-name>)*\s*>\s*)? + (?:(?:\*)*)? + (?:(?:\[,*\])*)? 
+ (?:\s*\.\s*\g<type-name>)* + )| + (?:\s*\(\s*(?:\g<type-name>)(?:\s+\g<identifier>)?(?:\s*,\s*(?:\g<type-name>)(?:\s+\g<identifier>)?)*\s*\)\s*)) + (?:(?:\[,*\])*)? + )\s+ +) +(?<interface-name>\g<type-name>\s*\.\s*)? +(?<method-name>\g<identifier>(?:\s*<\s*\g<identifier>(?:\s*,\s*\g<identifier>)*\s*>\s*)?)\s* +(?=\() + beginCaptures + + 1 + + patterns + + + match + \b(new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\b + captures + + 1 + + name + storage.modifier.cs + + + + + + 2 + + patterns + + + include + #type + + + + 5 + + patterns + + + include + #type + + + include + #punctuation-accessor + + + + 6 + + name + entity.name.function.cs + + + end + (?=\}|;) + patterns + + + include + #comment + + + include + #parenthesized-parameter-list + + + include + #generic-constraints + + + include + #expression-body + + + include + #block + + + + constructor-declaration + + begin + (?x) +(?= + # We're a extra careful here to avoid matching field declarations of the shape 'private (int i) x' + (?: + (?<storage-modifiers>(?:(?:public|protected|internal|private|extern|static)\s+)+)\s* + (?<identifier>[_$[:alpha:]][_$[:alnum:]]*)| + (?:\g<identifier>) + )\s* + (?:\() +) + end + (?=\}|;) + patterns + + + match + (?x) +(?<storage-modifiers>(?:(?:public|protected|internal|private|extern|static)\s+)+)\s* +(?<identifier>[_$[:alpha:]][_$[:alnum:]]*)\s* +(?=\() + captures + + 1 + + patterns + + + match + \b(public|protected|internal|private|extern|static)\b + captures + + 1 + + name + storage.modifier.cs + + + + + + 2 + + name + entity.name.function.cs + + + + + match + ([_$[:alpha:]][_$[:alnum:]]*)\s*(?=\() + captures + + 1 + + name + entity.name.function.cs + + + + + include + #comment + + + include + #parenthesized-parameter-list + + + include + #expression-body + + + include + #block + + + + destructor-declaration + + begin + (~)([_$[:alpha:]][_$[:alnum:]]*)\s*(?=\() + beginCaptures + + 1 + + name + punctuation.tilde.cs + + 2 
+ + name + entity.name.function.cs + + + end + (?=\}|;) + patterns + + + include + #comment + + + include + #parenthesized-parameter-list + + + include + #expression-body + + + include + #block + + + + operator-declaration + + begin + (?x) +(?<storage-modifiers>(?:(?:public|static|extern)\s+)*)\s* +(?<type-name>(?: + (?:(?<identifier>[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? + (?: + (?:\g<identifier>(?:\s*\.\s*\g<identifier>)*) + (?:\s*<\s*(?:\g<type-name>)(?:\s*,\s*\g<type-name>)*\s*>\s*)? + (?:(?:\*)*)? + (?:(?:\[,*\])*)? + (?:\s*\.\s*\g<type-name>)* + )| + (?:\s*\(\s*(?:\g<type-name>)(?:\s+\g<identifier>)?(?:\s*,\s*(?:\g<type-name>)(?:\s+\g<identifier>)?)*\s*\)\s*)) + (?:(?:\[,*\])*)? +)\s* +(?<operator-keyword>(?:\b(?:operator)))\s* +(?<operator>(?:\+|-|\*|/|%|&|\||\^|\<\<|\>\>|==|!=|\>|\<|\>=|\<=|!|~|\+\+|--|true|false))\s* +(?=\() + beginCaptures + + 1 + + patterns + + + match + \b(public|static|extern)\b + captures + + 1 + + name + storage.modifier.cs + + + + + + 2 + + patterns + + + include + #type + + + + 4 + + name + keyword.other.operator.cs + + 5 + + name + entity.name.function.cs + + + end + (?=\}|;) + patterns + + + include + #comment + + + include + #parenthesized-parameter-list + + + include + #expression-body + + + include + #block + + + + conversion-operator-declaration + + begin + (?x) +(?<storage-modifiers>(?:(?:public|static|extern)\s+)*)\s* +(?<explicit-or-implicit-keyword>(?:\b(?:explicit|implicit)))\s* +(?<operator-keyword>(?:\b(?:operator)))\s* +(?<type-name>(?: + (?:(?<identifier>[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? + (?: + (?:\g<identifier>(?:\s*\.\s*\g<identifier>)*) + (?:\s*<\s*(?:\g<type-name>)(?:\s*,\s*\g<type-name>)*\s*>\s*)? + (?:(?:\*)*)? + (?:(?:\[,*\])*)? + (?:\s*\.\s*\g<type-name>)* + )| + (?:\s*\(\s*(?:\g<type-name>)(?:\s+\g<identifier>)?(?:\s*,\s*(?:\g<type-name>)(?:\s+\g<identifier>)?)*\s*\)\s*)) + (?:(?:\[,*\])*)? 
+)\s* +(?=\() + beginCaptures + + 1 + + patterns + + + match + \b(public|static|extern)\b + captures + + 1 + + name + storage.modifier.cs + + + + + + 2 + + patterns + + + match + \b(explicit)\b + captures + + 1 + + name + keyword.other.explicit.cs + + + + + match + \b(implicit)\b + captures + + 1 + + name + keyword.other.implicit.cs + + + + + + 3 + + name + keyword.other.operator.cs + + 4 + + patterns + + + include + #type + + + + + end + (?=\}|;) + patterns + + + include + #comment + + + include + #parenthesized-parameter-list + + + include + #expression-body + + + include + #block + + + + block + + begin + \{ + beginCaptures + + 0 + + name + punctuation.curlybrace.open.cs + + + end + \} + endCaptures + + 0 + + name + punctuation.curlybrace.close.cs + + + patterns + + + include + #statement + + + + variable-initializer + + begin + (?<!=|!)(=)(?!=|>) + beginCaptures + + 1 + + name + keyword.operator.assignment.cs + + + end + (?=[,\);}]) + patterns + + + include + #expression + + + + expression-body + + begin + => + beginCaptures + + 0 + + name + keyword.operator.arrow.cs + + + end + (?=[,\);}]) + patterns + + + include + #expression + + + + control-statement + + patterns + + + name + keyword.control.loop.cs + match + (?<!\.)\b(do|while)\b + + + name + keyword.control.flow.cs + match + (?<!\.)\b(return)\b + + + + interpolated-string + + name + string.quoted.double.cs + begin + \$" + beginCaptures + + 0 + + name + punctuation.definition.string.begin.cs + + + end + (")|((?:[^\\\n])$) + endCaptures + + 1 + + name + punctuation.definition.string.end.cs + + 2 + + name + invalid.illegal.newline.cs + + + patterns + + + include + #string-character-escape + + + include + #interpolation + + + + verbatim-interpolated-string + + name + string.quoted.double.cs + begin + \$@" + beginCaptures + + 0 + + name + punctuation.definition.string.begin.cs + + + end + "(?=[^"]) + endCaptures + + 0 + + name + punctuation.definition.string.end.cs + + + patterns + + + include + 
#verbatim-string-character-escape + + + include + #interpolation + + + + interpolation + + name + meta.interpolation.cs + begin + (?<=[^\{])((?:\{\{)*)(\{)(?=[^\{]) + beginCaptures + + 1 + + name + string.quoted.double.cs + + 2 + + name + punctuation.definition.interpolation.begin.cs + + + end + \} + endCaptures + + 0 + + name + punctuation.definition.interpolation.end.cs + + + patterns + + + include + #expression + + + + literal + + patterns + + + include + #boolean-literal + + + include + #null-literal + + + include + #numeric-literal + + + include + #string-literal + + + include + #verbatim-string-literal + + + + boolean-literal + + patterns + + + name + constant.language.boolean.true.cs + match + (?<!\.)\btrue\b + + + name + constant.language.boolean.false.cs + match + (?<!\.)\bfalse\b + + + + null-literal + + name + constant.language.null.cs + match + (?<!\.)\bnull\b + + numeric-literal + + patterns + + + name + constant.numeric.hex.cs + match + \b0(x|X)[0-9a-fA-F_]+(U|u|L|l|UL|Ul|uL|ul|LU|Lu|lU|lu)?\b + + + name + constant.numeric.binary.cs + match + \b0(b|B)[01_]+(U|u|L|l|UL|Ul|uL|ul|LU|Lu|lU|lu)?\b + + + name + constant.numeric.decimal.cs + match + \b([0-9_]+)?\.[0-9_]+((e|E)[0-9]+)?(F|f|D|d|M|m)?\b + + + name + constant.numeric.decimal.cs + match + \b[0-9_]+(e|E)[0-9_]+(F|f|D|d|M|m)?\b + + + name + constant.numeric.decimal.cs + match + \b[0-9_]+(F|f|D|d|M|m)\b + + + name + constant.numeric.decimal.cs + match + \b[0-9_]+(U|u|L|l|UL|Ul|uL|ul|LU|Lu|lU|lu)?\b + + + + string-literal + + name + string.quoted.double.cs + begin + (?<!@)" + beginCaptures + + 0 + + name + punctuation.definition.string.begin.cs + + + end + (")|((?:[^\\\n])$) + endCaptures + + 1 + + name + punctuation.definition.string.end.cs + + 2 + + name + invalid.illegal.newline.cs + + + patterns + + + include + #string-character-escape + + + + string-character-escape + + name + constant.character.escape.cs + match + \\. 
+ + verbatim-string-literal + + name + string.quoted.double.cs + begin + @" + beginCaptures + + 0 + + name + punctuation.definition.string.begin.cs + + + end + "(?=[^"]) + endCaptures + + 0 + + name + punctuation.definition.string.end.cs + + + patterns + + + include + #verbatim-string-character-escape + + + + verbatim-string-character-escape + + name + constant.character.escape.cs + match + "" + + expression-operators + + patterns + + + name + keyword.operator.bitwise.shift.cs + match + <<|>> + + + name + keyword.operator.comparison.cs + match + ==|!= + + + name + keyword.operator.relational.cs + match + <=|>=|<|> + + + name + keyword.operator.logical.cs + match + \!|&&|\|\| + + + name + keyword.operator.bitwise.cs + match + \&|~|\^|\| + + + name + keyword.operator.assignment.cs + match + \= + + + name + keyword.operator.decrement.cs + match + -- + + + name + keyword.operator.increment.cs + match + \+\+ + + + name + keyword.operator.arithmetic.cs + match + %|\*|/|-|\+ + + + + parenthesized-expression + + begin + \( + beginCaptures + + 0 + + name + punctuation.parenthesis.open.cs + + + end + \) + endCaptures + + 0 + + name + punctuation.parenthesis.close.cs + + + patterns + + + include + #expression + + + + identifier + + name + variable.other.readwrite.cs + match + [_$[:alpha:]][_$[:alnum:]]* + + object-creation-expression + + begin + (?x) +(new)\s+ +(?<type-name>(?: + (?:(?<identifier>[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? + (?: + (?:\g<identifier>(?:\s*\.\s*\g<identifier>)*) + (?:\s*<\s*(?:\g<type-name>)(?:\s*,\s*\g<type-name>)*\s*>\s*)? + (?:(?:\*)*)? + (?:(?:\[,*\])*)? + (?:\s*\.\s*\g<type-name>)* + )| + (?:\s*\(\s*(?:\g<type-name>)(?:\s+\g<identifier>)?(?:\s*,\s*(?:\g<type-name>)(?:\s+\g<identifier>)?)*\s*\)\s*)) + (?:(?:\[,*\])*)? 
+)\s* +(?=\() + beginCaptures + + 1 + + name + keyword.other.new.cs + + 2 + + patterns + + + include + #type + + + + + end + (?<=\)) + patterns + + + include + #parenthesized-parameter-list + + + + bracketed-parameter-list + + begin + (?=(\[)) + beginCaptures + + 1 + + name + punctuation.squarebracket.open.cs + + + end + (?=(\])) + endCaptures + + 1 + + name + punctuation.squarebracket.close.cs + + + patterns + + + begin + (?<=\[) + end + (?=\]) + patterns + + + include + #comment + + + include + #attribute-section + + + name + storage.modifier.cs + match + \b(ref|params|out)\b + + + match + \s+([_$[:alpha:]][_$[:alnum:]]*)\s*(?=[,\]]) + captures + + 1 + + name + variable.parameter.cs + + + + + include + #variable-initializer + + + include + #type + + + include + #punctuation-comma + + + + + + parenthesized-parameter-list + + begin + (\() + beginCaptures + + 0 + + name + punctuation.parenthesis.open.cs + + + end + (\)) + endCaptures + + 0 + + name + punctuation.parenthesis.close.cs + + + patterns + + + include + #comment + + + include + #attribute-section + + + name + storage.modifier.cs + match + \b(ref|params|out)\b + + + match + \s+([_$[:alpha:]][_$[:alnum:]]*)\s*(?=[,)]) + captures + + 1 + + name + variable.parameter.cs + + + + + include + #variable-initializer + + + include + #type + + + include + #punctuation-comma + + + + type + + name + meta.type.cs + patterns + + + include + #comment + + + include + #tuple-type + + + include + #type-builtin + + + include + #type-name + + + include + #type-parameters + + + include + #type-array-suffix + + + + tuple-type + + patterns + + + begin + \( + beginCaptures + + 0 + + name + punctuation.parenthesis.open.cs + + + end + \) + endCaptures + + 0 + + name + punctuation.parenthesis.close.cs + + + patterns + + + include + #tuple-element + + + include + #punctuation-comma + + + + + + tuple-element + + match + (?x) +(?<type-name>(?: + (?:(?<identifier>[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? 
+ (?: + (?:\g<identifier>(?:\s*\.\s*\g<identifier>)*) + (?:\s*<\s*(?:\g<type-name>)(?:\s*,\s*\g<type-name>)*\s*>\s*)? + (?:(?:\*)*)? + (?:(?:\[,*\])*)? + (?:\s*\.\s*\g<type-name>)* + )| + (?:\s*\(\s*(?:\g<type-name>)(?:\s+\g<identifier>)?(?:\s*,\s*(?:\g<type-name>)(?:\s+\g<identifier>)?)*\s*\)\s*)) + (?:(?:\[,*\])*)? +) +(?:\s+(?<tuple-name>\g<identifier>))? + captures + + 1 + + patterns + + + include + #type + + + + 3 + + name + entity.name.variable.tuple.cs + + + + type-builtin + + match + \b(bool|byte|char|decimal|double|float|int|long|object|sbyte|short|string|uint|ulong|ushort)\b + captures + + 1 + + name + storage.type.cs + + + + type-name + + patterns + + + match + ([_$[:alpha:]][_$[:alnum:]]*)\s*(\:\:) + captures + + 1 + + name + entity.name.type.alias.cs + + 2 + + name + punctuation.separator.coloncolon.cs + + + + + match + ([_$[:alpha:]][_$[:alnum:]]*)\s*(\.) + captures + + 1 + + name + storage.type.cs + + 2 + + name + punctuation.accessor.cs + + + + + match + (\.)\s*([_$[:alpha:]][_$[:alnum:]]*) + captures + + 1 + + name + punctuation.accessor.cs + + 2 + + name + storage.type.cs + + + + + name + storage.type.cs + match + [_$[:alpha:]][_$[:alnum:]]* + + + + type-parameters + + name + meta.type.parameters.cs + begin + < + beginCaptures + + 0 + + name + punctuation.definition.typeparameters.begin.cs + + + end + > + endCaptures + + 0 + + name + punctuation.definition.typeparameters.end.cs + + + patterns + + + include + #comment + + + include + #type + + + include + #punctuation-comma + + + + type-array-suffix + + begin + \[ + beginCaptures + + 0 + + name + punctuation.squarebracket.open.cs + + + end + \] + endCaptures + + 0 + + name + punctuation.squarebracket.close.cs + + + patterns + + + include + #punctuation-comma + + + + operator-assignment + + name + keyword.operator.assignment.cs + match + (?<!=|!)(=)(?!=) + + punctuation-comma + + name + punctuation.separator.comma.cs + match + , + + punctuation-semicolon + + name + 
punctuation.terminator.statement.cs + match + ; + + punctuation-accessor + + name + punctuation.accessor.cs + match + \. + + comment + + patterns + + + name + comment.block.cs + begin + /\* + beginCaptures + + 0 + + name + punctuation.definition.comment.cs + + + end + \*/ + endCaptures + + 0 + + name + punctuation.definition.comment.cs + + + + + begin + (^[ \t]+)?(?=//) + beginCaptures + + 1 + + name + punctuation.whitespace.comment.leading.cs + + + end + (?=$) + patterns + + + name + comment.line.double-slash.cs + begin + // + beginCaptures + + 0 + + name + punctuation.definition.comment.cs + + + end + (?=$) + + + + + + + + \ No newline at end of file diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml new file mode 100644 index 0000000000..7488f18c60 --- /dev/null +++ b/syntaxes/csharp.tmLanguage.yml @@ -0,0 +1,1150 @@ +# [PackageDev] target_format: plist, ext: tmLanguage +--- +name: C# +scopeName: source.cs +fileTypes: [cs] +uuid: f7de61e2-bdde-4e2a-a139-8221b179584e + +patterns: +- include: '#comment' +- include: '#directives' +- include: '#declarations' + +repository: + directives: + patterns: + - include: '#extern-alias-directive' + - include: '#using-directive' + - include: '#attribute-section' + - include: '#punctuation-semicolon' + + declarations: + patterns: + - include: '#namespace-declaration' + - include: '#type-declarations' + - include: '#punctuation-semicolon' + + type-declarations: + patterns: + - include: '#class-declaration' + - include: '#delegate-declaration' + - include: '#enum-declaration' + - include: '#interface-declaration' + - include: '#struct-declaration' + - include: '#punctuation-semicolon' + + class-members: + patterns: + - include: '#type-declarations' + - include: '#event-declaration' + - include: '#property-declaration' + - include: '#indexer-declaration' + - include: '#field-declaration' + - include: '#variable-initializer' + - include: '#method-declaration' + - include: '#constructor-declaration' + - 
include: '#destructor-declaration' + - include: '#operator-declaration' + - include: '#conversion-operator-declaration' + - include: '#punctuation-semicolon' + + struct-members: + patterns: + - include: '#type-declarations' + - include: '#event-declaration' + - include: '#property-declaration' + - include: '#indexer-declaration' + - include: '#field-declaration' + - include: '#variable-initializer' + - include: '#method-declaration' + - include: '#constructor-declaration' + - include: '#destructor-declaration' + - include: '#operator-declaration' + - include: '#conversion-operator-declaration' + - include: '#punctuation-semicolon' + + interface-members: + patterns: + - include: '#event-declaration' + - include: '#property-declaration' + - include: '#indexer-declaration' + - include: '#method-declaration' + - include: '#punctuation-semicolon' + + statement: + patterns: + - include: '#comment' + - include: '#control-statement' + - include: '#expression' + - include: '#block' + - include: '#punctuation-semicolon' + + expression: + patterns: + - include: '#interpolated-string' + - include: '#verbatim-interpolated-string' + - include: '#literal' + - include: '#expression-operators' + - include: '#object-creation-expression' + - include: '#parenthesized-expression' + - include: '#identifier' + + extern-alias-directive: + begin: \s*(extern)\b\s*(alias)\b\s*([_$[:alpha:]][_$[:alnum:]]*) + beginCaptures: + '1': { name: keyword.other.extern.cs } + '2': { name: keyword.other.alias.cs } + '3': { name: variable.other.alias.cs } + end: (?=;) + + using-directive: + patterns: + - begin: \b(using)\b\s+(static)\s+ + beginCaptures: + '1': { name: keyword.other.using.cs } + '2': { name: keyword.other.static.cs } + end: (?=;) + patterns: + - include: '#type' + - begin: \b(using)\s+(?=([_$[:alpha:]][_$[:alnum:]]*)\s*=) + beginCaptures: + '1': { name: keyword.other.using.cs } + '2': { name: entity.name.type.alias.cs } + end: (?=;) + patterns: + - include: '#comment' + - include: '#type' 
+ - include: '#operator-assignment' + - begin: \b(using)\s* + beginCaptures: + '1': { name: keyword.other.using.cs } + end: (?=;) + patterns: + - include: '#comment' + - name: entity.name.type.namespace.cs + match: '[_$[:alpha:]][_$[:alnum:]]*' + - include: '#operator-assignment' + + attribute-section: + begin: (\[)(assembly|module|field|event|method|param|property|return|type)?(\:)? + beginCaptures: + '1': { name: punctuation.squarebracket.open.cs } + '2': { name: keyword.other.attribute-specifier.cs } + '3': { name: punctuation.separator.colon.cs } + end: (\]) + endCaptures: + '1': { name: punctuation.squarebracket.close.cs } + patterns: + - include: '#comment' + - include: '#attribute' + - include: '#punctuation-comma' + + attribute: + patterns: + - include: '#type-name' + - include: '#attribute-arguments' + + attribute-arguments: + begin: (\() + beginCaptures: + '1': { name: punctuation.parenthesis.open.cs } + end: (\)) + endCaptures: + '1': { name: punctuation.parenthesis.close.cs } + patterns: + - include: '#attribute-named-argument' + - include: '#expression' + - include: '#punctuation-comma' + + attribute-named-argument: + begin: ([_$[:alpha:]][_$[:alnum:]]*)\s*(?==) + beginCaptures: + '1': { name: entity.name.variable.property.cs } + end: (?=(,|\))) + patterns: + - include: '#operator-assignment' + - include: '#expression' + + namespace-declaration: + begin: \b(namespace)\s+ + beginCaptures: + '1': { name: keyword.other.namespace.cs } + end: (?<=\}) + patterns: + - include: '#comment' + - name: entity.name.type.namespace.cs + match: '[_$[:alpha:]][_$[:alnum:]]*' + - include: '#punctuation-accessor' + - begin: \{ + beginCaptures: + '0': { name: punctuation.curlybrace.open.cs } + end: \} + endCaptures: + '0': { name: punctuation.curlybrace.close.cs } + patterns: + - include: '#declarations' + - include: '#using-directive' + - include: '#punctuation-semicolon' + + class-declaration: + begin: 
(?=(?:((new|public|protected|internal|private|abstract|sealed|static|partial)\s+)*)(?:class)\s+) + end: (?<=\}) + patterns: + - include: '#comment' + - name: storage.modifier.cs + match: \b(new|public|protected|internal|private|abstract|sealed|static|partial)\b + - begin: (?=class) + end: (?=\{) + patterns: + # C# grammar: class identifier type-parameter-list[opt] + - match: (class)\s+([_$[:alpha:]][_$[:alnum:]]*(\s*<\s*(?:[_$[:alpha:]][_$[:alnum:]]*\s*,\s*)*(?:[_$[:alpha:]][_$[:alnum:]]*)\s*>)?) + captures: + '1': { name: keyword.other.class.cs } + '2': { name: entity.name.type.class.cs } + - include: '#generic-constraints' + - begin: ':' + beginCaptures: + '0': { name: punctuation.separator.colon.cs } + end: (?=\{|where) + patterns: + - include: '#type' + - include: '#punctuation-comma' + - begin: \{ + beginCaptures: + '0': { name: punctuation.curlybrace.open.cs } + end: \} + endCaptures: + '0': { name: punctuation.curlybrace.close.cs } + patterns: + - include: '#class-members' + + delegate-declaration: + begin: |- + (?x) + (?(?:\b(?:new|public|protected|internal|private)\b\s+)*) + (?(?:\b(?:delegate)\b))\s+ + (?(?: + (?:(?[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? + (?: + (?:\g(?:\s*\.\s*\g)*) + (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? + (?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)* + )| + (?:\s*\(\s*(?:\g)(?:\s+\g)?(?:\s*,\s*(?:\g)(?:\s+\g)?)*\s*\)\s*)) + (?:(?:\[,*\])*)? + )\s+ + (? + (?: + \g + (?:\s*<\s*(?:(?:(?:in|out)\s+)?\g)(?:,\s*(?:(?:in|out)\s+)?\g)*\s*>\s*)? + ) + )\s* + (?=\() + beginCaptures: + '1': + patterns: + - match: \b(new|public|protected|internal|private)\b + captures: + '1': { name: storage.modifier.cs } + '2': { name: keyword.other.delegate.cs } + '3': + patterns: + - include: '#type' + # '4': ? is a sub-expression. It's final value is not considered. 
+ '5': { name: entity.name.type.delegate.cs } + end: (?=;) + patterns: + - include: '#comment' + - include: '#parenthesized-parameter-list' + - include: '#generic-constraints' + + enum-declaration: + begin: (?=(?:((new|public|protected|internal|private)\s+)*)(?:enum)\s+) + end: (?<=\}) + patterns: + - include: '#comment' + - name: storage.modifier.cs + match: \b(new|public|protected|internal|private)\b + - begin: (?=enum) + end: (?=\{) + patterns: + # C# grammar: enum identifier + - match: (enum)\s+([_$[:alpha:]][_$[:alnum:]]*) + captures: + '1': { name: keyword.other.enum.cs } + '2': { name: entity.name.type.enum.cs } + - begin: ':' + beginCaptures: + '0': { name: punctuation.separator.colon.cs } + end: (?=\{) + patterns: + - include: '#type' + - begin: \{ + beginCaptures: + '0': { name: punctuation.curlybrace.open.cs } + end: \} + endCaptures: + '0': { name: punctuation.curlybrace.close.cs } + patterns: + - include: '#comment' + - include: '#attribute-section' + - include: '#punctuation-comma' + - begin: '[_$[:alpha:]][_$[:alnum:]]*' + beginCaptures: + '0': { name: variable.other.enummember.cs } + end: (?=(,|\})) + patterns: + - include: '#comment' + - include: '#variable-initializer' + + interface-declaration: + begin: (?=(?:((new|public|protected|internal|private|partial)\s+)*)(?:interface)\s+) + end: (?<=\}) + patterns: + - include: '#comment' + - name: storage.modifier.cs + match: \b(new|public|protected|internal|private|partial)\b + - begin: (?=interface) + end: (?=\{) + patterns: + # C# grammar: interface identifier variant-type-parameter-list[opt] + - match: |- + (?x) + (interface)\s+ + ( + (?[_$[:alpha:]][_$[:alnum:]]*) + (\s*<\s*(?:(?:(?:in|out)\s+)?\g\s*,\s*)*(?:(?:in|out)\s+)?\g\s*>)? 
+ ) + captures: + '1': { name: keyword.other.interface.cs } + '2': { name: entity.name.type.interface.cs } + - include: '#generic-constraints' + - begin: ':' + beginCaptures: + '0': { name: punctuation.separator.colon.cs } + end: (?=\{|where) + patterns: + - include: '#type' + - include: '#punctuation-comma' + - begin: \{ + beginCaptures: + '0': { name: punctuation.curlybrace.open.cs } + end: \} + endCaptures: + '0': { name: punctuation.curlybrace.close.cs } + patterns: + - include: '#interface-members' + + struct-declaration: + begin: (?=(?:((new|public|protected|internal|private|partial)\s+)*)(?:struct)\s+) + end: (?<=\}) + patterns: + - include: '#comment' + - name: storage.modifier.cs + match: \b(new|public|protected|internal|private|partial)\b + - begin: (?=struct) + end: (?=\{) + patterns: + # C# grammar: struct identifier type-parameter-list[opt] + - match: |- + (?x) + (struct)\s+ + ( + (?[_$[:alpha:]][_$[:alnum:]]*) + (\s*<\s*(?:\g\s*,\s*)*\g\s*>)? + ) + captures: + '1': { name: keyword.other.struct.cs } + '2': { name: entity.name.type.struct.cs } + - include: '#generic-constraints' + - begin: ':' + beginCaptures: + '0': { name: punctuation.separator.colon.cs } + end: (?=\{|where) + patterns: + - include: '#type' + - include: '#punctuation-comma' + - begin: \{ + beginCaptures: + '0': { name: punctuation.curlybrace.open.cs } + end: \} + endCaptures: + '0': { name: punctuation.curlybrace.close.cs } + patterns: + - include: '#struct-members' + + generic-constraints: + begin: (where)\s+([_$[:alpha:]][_$[:alnum:]]*)\s*(:) + beginCaptures: + '1': { name: keyword.other.where.cs } + '2': { name: storage.type.cs } + '3': { name: punctuation.separator.colon.cs } + end: (?=\{|where|;) + patterns: + - name: keyword.other.class.cs + match: \bclass\b + - name: keyword.other.struct.cs + match: \bstruct\b + - match: (new)\s*(\()\s*(\)) + captures: + '1': { name: keyword.other.new.cs } + '2': { name: punctuation.parenthesis.open.cs } + '3': { name: 
punctuation.parenthesis.close.cs } + - include: '#type' + - include: '#punctuation-comma' + - include: '#generic-constraints' + + field-declaration: + begin: |- + (?x) + (?(?:\b(?:new|public|protected|internal|private|static|readonly|volatile|const)\b\s+)*)\s* + (?(?: + (?:(?[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? + (?: + (?:\g(?:\s*\.\s*\g)*) + (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? + (?:(?:\*)*)? + (?:(?:\[,*\])*)? + (?:\s*\.\s*\g)* + )| + (?:\s*\(\s*(?:\g)(?:\s+\g)?(?:\s*,\s*(?:\g)(?:\s+\g)?)*\s*\)\s*)) + (?:(?:\[,*\])*)? + )\s+ + (?\g)\s* + (?!=>|==)(?=;|=) + beginCaptures: + '1': + patterns: + - match: \b(new|public|protected|internal|private|static|readonly|volatile|const)\b + captures: + '1': { name: storage.modifier.cs } + '2': + patterns: + - include: '#type' + '3': { name: entity.name.variable.field.cs } + end: (?=;) + patterns: + - name: entity.name.variable.field.cs + match: '[_$[:alpha:]][_$[:alnum:]]*' + - include: '#punctuation-comma' + - include: '#comment' + - include: '#variable-initializer' + + property-declaration: + begin: |- + (?x) + (?!.*\b(?:class|interface|struct|enum|event)\b) + (?(?:\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\b\s*)*)\s* + (? + (?(?: + (?:(?[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? + (?: + (?:\g(?:\s*\.\s*\g)*) + (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? + (?:(?:\*)*)? + (?:(?:\[,*\])*)? + (?:\s*\.\s*\g)* + )| + (?:\s*\(\s*(?:\g)(?:\s+\g)?(?:\s*,\s*(?:\g)(?:\s+\g)?)*\s*\)\s*)) + (?:(?:\[,*\])*)? + )\s+ + ) + (?\g\s*\.\s*)? + (?\g)\s* + (?=\{|=>|$) + beginCaptures: + '1': + patterns: + - match: \b(new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\b + captures: + '1': { name: storage.modifier.cs } + '2': + patterns: + - include: '#type' + # '3': ? is a sub-expression. It's final value is not considered. + # '4': ? is a sub-expression. It's final value is not considered. 
+ '5': + patterns: + - include: '#type' + - include: '#punctuation-accessor' + '6': { name: entity.name.variable.property.cs } + end: (?=\}|;) + patterns: + - include: '#comment' + - include: '#property-accessors' + - include: '#expression-body' + - include: '#variable-initializer' + + indexer-declaration: + begin: |- + (?x) + (?(?:\b(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\b\s*)*)\s* + (? + (?(?: + (?:(?[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? + (?: + (?:\g(?:\s*\.\s*\g)*) + (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? + (?:(?:\*)*)? + (?:(?:\[,*\])*)? + (?:\s*\.\s*\g)* + )| + (?:\s*\(\s*(?:\g)(?:\s+\g)?(?:\s*,\s*(?:\g)(?:\s+\g)?)*\s*\)\s*)) + (?:(?:\[,*\])*)? + )\s+ + ) + (?\g\s*\.\s*)? + (?this)\s* + (?=\[) + beginCaptures: + '1': + patterns: + - match: \b(new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\b + captures: + '1': { name: storage.modifier.cs } + '2': + patterns: + - include: '#type' + # '3': ? is a sub-expression. It's final value is not considered. + # '4': ? is a sub-expression. It's final value is not considered. + '5': + patterns: + - include: '#type' + - include: '#punctuation-accessor' + '6': + name: keyword.other.this.cs + end: (?=\}|;) + patterns: + - include: '#comment' + - include: '#bracketed-parameter-list' + - include: '#property-accessors' + - include: '#expression-body' + - include: '#variable-initializer' + + event-declaration: + begin: |- + (?x) + (?(?:\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\b\s*)*)\s* + \b(?event)\b\s* + (? + (?(?: + (?:(?[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? + (?: + (?:\g(?:\s*\.\s*\g)*) + (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? + (?:(?:\*)*)? + (?:(?:\[,*\])*)? + (?:\s*\.\s*\g)* + )| + (?:\s*\(\s*(?:\g)(?:\s+\g)?(?:\s*,\s*(?:\g)(?:\s+\g)?)*\s*\)\s*)) + (?:(?:\[,*\])*)? + )\s+ + ) + (?\g\s*\.\s*)? 
+ (?\g(?:\s*,\s*\g)*)\s* + (?=\{|;|$) + beginCaptures: + '1': + patterns: + - match: \b(new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\b + captures: + '1': { name: storage.modifier.cs } + '2': { name: keyword.other.event.cs } + '3': + patterns: + - include: '#type' + # '4': ? is a sub-expression. It's final value is not considered. + # '5': ? is a sub-expression. It's final value is not considered. + '6': + patterns: + - include: '#type' + - include: '#punctuation-accessor' + '7': + patterns: + - name: entity.name.variable.event.cs + match: '[_$[:alpha:]][_$[:alnum:]]*' + - include: '#punctuation-comma' + end: (?=\}|;) + patterns: + - include: '#comment' + - include: '#event-accessors' + - include: '#punctuation-comma' + + property-accessors: + begin: \{ + beginCaptures: + '0': { name: punctuation.curlybrace.open.cs } + end: \} + endCaptures: + '0': { name: punctuation.curlybrace.close.cs } + patterns: + - name: storage.modifier.cs + match: \b(private|protected|internal)\b + - name: keyword.other.get.cs + match: \b(get)\b + - name: keyword.other.set.cs + match: \b(set)\b + - include: '#block' + - include: '#punctuation-semicolon' + + event-accessors: + begin: \{ + beginCaptures: + '0': { name: punctuation.curlybrace.open.cs } + end: \} + endCaptures: + '0': { name: punctuation.curlybrace.close.cs } + patterns: + - name: keyword.other.add.cs + match: \b(add)\b + - name: keyword.other.remove.cs + match: \b(remove)\b + - include: '#block' + - include: '#punctuation-semicolon' + + method-declaration: + begin: |- + (?x) + (?(?:\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\b\s*)*)\s* + (? + (?(?: + (?:(?[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? + (?: + (?:\g(?:\s*\.\s*\g)*) + (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? + (?:(?:\*)*)? + (?:(?:\[,*\])*)? + (?:\s*\.\s*\g)* + )| + (?:\s*\(\s*(?:\g)(?:\s+\g)?(?:\s*,\s*(?:\g)(?:\s+\g)?)*\s*\)\s*)) + (?:(?:\[,*\])*)? 
+ )\s+ + ) + (?\g\s*\.\s*)? + (?\g(?:\s*<\s*\g(?:\s*,\s*\g)*\s*>\s*)?)\s* + (?=\() + beginCaptures: + '1': + patterns: + - match: \b(new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\b + captures: + '1': { name: storage.modifier.cs } + '2': + patterns: + - include: '#type' + # '3': ? is a sub-expression. It's final value is not considered. + # '4': ? is a sub-expression. It's final value is not considered. + '5': + patterns: + - include: '#type' + - include: '#punctuation-accessor' + '6': + name: entity.name.function.cs + end: (?=\}|;) + patterns: + - include: '#comment' + - include: '#parenthesized-parameter-list' + - include: '#generic-constraints' + - include: '#expression-body' + - include: '#block' + + constructor-declaration: + begin: |- + (?x) + (?= + # We're a extra careful here to avoid matching field declarations of the shape 'private (int i) x' + (?: + (?(?:(?:public|protected|internal|private|extern|static)\s+)+)\s* + (?[_$[:alpha:]][_$[:alnum:]]*)| + (?:\g) + )\s* + (?:\() + ) + end: (?=\}|;) + patterns: + - match: |- + (?x) + (?(?:(?:public|protected|internal|private|extern|static)\s+)+)\s* + (?[_$[:alpha:]][_$[:alnum:]]*)\s* + (?=\() + captures: + '1': + patterns: + - match: \b(public|protected|internal|private|extern|static)\b + captures: + '1': { name: storage.modifier.cs } + '2': + name: entity.name.function.cs + - match: ([_$[:alpha:]][_$[:alnum:]]*)\s*(?=\() + captures: + '1': { name: entity.name.function.cs } + - include: '#comment' + - include: '#parenthesized-parameter-list' + - include: '#expression-body' + - include: '#block' + + destructor-declaration: + begin: (~)([_$[:alpha:]][_$[:alnum:]]*)\s*(?=\() + beginCaptures: + '1': { name: punctuation.tilde.cs } + '2': { name: entity.name.function.cs } + end: (?=\}|;) + patterns: + - include: '#comment' + - include: '#parenthesized-parameter-list' + - include: '#expression-body' + - include: '#block' + + operator-declaration: + begin: |- + (?x) + 
(?(?:(?:public|static|extern)\s+)*)\s* + (?(?: + (?:(?[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? + (?: + (?:\g(?:\s*\.\s*\g)*) + (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? + (?:(?:\*)*)? + (?:(?:\[,*\])*)? + (?:\s*\.\s*\g)* + )| + (?:\s*\(\s*(?:\g)(?:\s+\g)?(?:\s*,\s*(?:\g)(?:\s+\g)?)*\s*\)\s*)) + (?:(?:\[,*\])*)? + )\s* + (?(?:\b(?:operator)))\s* + (?(?:\+|-|\*|/|%|&|\||\^|\<\<|\>\>|==|!=|\>|\<|\>=|\<=|!|~|\+\+|--|true|false))\s* + (?=\() + beginCaptures: + '1': + patterns: + - match: \b(public|static|extern)\b + captures: + '1': { name: storage.modifier.cs } + '2': + patterns: + - include: '#type' + # '3': ? is a sub-expression. It's final value is not considered. + '4': { name: keyword.other.operator.cs } + '5': { name: entity.name.function.cs } + end: (?=\}|;) + patterns: + - include: '#comment' + - include: '#parenthesized-parameter-list' + - include: '#expression-body' + - include: '#block' + + conversion-operator-declaration: + begin: |- + (?x) + (?(?:(?:public|static|extern)\s+)*)\s* + (?(?:\b(?:explicit|implicit)))\s* + (?(?:\b(?:operator)))\s* + (?(?: + (?:(?[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? + (?: + (?:\g(?:\s*\.\s*\g)*) + (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? + (?:(?:\*)*)? + (?:(?:\[,*\])*)? + (?:\s*\.\s*\g)* + )| + (?:\s*\(\s*(?:\g)(?:\s+\g)?(?:\s*,\s*(?:\g)(?:\s+\g)?)*\s*\)\s*)) + (?:(?:\[,*\])*)? 
+ )\s* + (?=\() + beginCaptures: + '1': + patterns: + - match: \b(public|static|extern)\b + captures: + '1': { name: storage.modifier.cs } + '2': + patterns: + - match: \b(explicit)\b + captures: + '1': { name: keyword.other.explicit.cs } + - match: \b(implicit)\b + captures: + '1': { name: keyword.other.implicit.cs } + '3': { name: keyword.other.operator.cs } + '4': + patterns: + - include: '#type' + end: (?=\}|;) + patterns: + - include: '#comment' + - include: '#parenthesized-parameter-list' + - include: '#expression-body' + - include: '#block' + + block: + begin: \{ + beginCaptures: + '0': { name: punctuation.curlybrace.open.cs } + end: \} + endCaptures: + '0': { name: punctuation.curlybrace.close.cs } + patterns: + - include: '#statement' + + variable-initializer: + begin: (?) + beginCaptures: + '1': { name: keyword.operator.assignment.cs } + end: (?=[,\);}]) + patterns: + - include: '#expression' + + expression-body: + begin: => + beginCaptures: + '0': { name: keyword.operator.arrow.cs } + end: (?=[,\);}]) + patterns: + - include: '#expression' + + control-statement: + patterns: + - name: keyword.control.loop.cs + match: (?> + - name: keyword.operator.comparison.cs + match: ==|!= + - name: keyword.operator.relational.cs + match: <=|>=|<|> + - name: keyword.operator.logical.cs + match: \!|&&|\|\| + - name: keyword.operator.bitwise.cs + match: \&|~|\^|\| + - name: keyword.operator.assignment.cs + match: \= + - name: keyword.operator.decrement.cs + match: -- + - name: keyword.operator.increment.cs + match: \+\+ + - name: keyword.operator.arithmetic.cs + match: '%|\*|/|-|\+' + + parenthesized-expression: + begin: \( + beginCaptures: + '0': { name: punctuation.parenthesis.open.cs } + end: \) + endCaptures: + '0': { name: punctuation.parenthesis.close.cs } + patterns: + - include: '#expression' + + identifier: + name: variable.other.readwrite.cs + match: '[_$[:alpha:]][_$[:alnum:]]*' + + object-creation-expression: + begin: |- + (?x) + (new)\s+ + (?(?: + 
(?:(?[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? + (?: + (?:\g(?:\s*\.\s*\g)*) + (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? + (?:(?:\*)*)? + (?:(?:\[,*\])*)? + (?:\s*\.\s*\g)* + )| + (?:\s*\(\s*(?:\g)(?:\s+\g)?(?:\s*,\s*(?:\g)(?:\s+\g)?)*\s*\)\s*)) + (?:(?:\[,*\])*)? + )\s* + (?=\() + beginCaptures: + '1': { name: keyword.other.new.cs } + '2': + patterns: + - include: '#type' + end: (?<=\)) + patterns: + - include: '#parenthesized-parameter-list' + + bracketed-parameter-list: + begin: (?=(\[)) + beginCaptures: + '1': { name: punctuation.squarebracket.open.cs } + end: (?=(\])) + endCaptures: + '1': { name: punctuation.squarebracket.close.cs } + patterns: + # Note: We have to be careful here to skip the [. Otherwise, attributes will conflict. + - begin: (?<=\[) + end: (?=\]) + patterns: + - include: '#comment' + - include: '#attribute-section' + - name: storage.modifier.cs + match: \b(ref|params|out)\b + # parameter name + - match: \s+([_$[:alpha:]][_$[:alnum:]]*)\s*(?=[,\]]) + captures: + '1': { name: variable.parameter.cs } + - include: '#variable-initializer' + - include: '#type' + - include: '#punctuation-comma' + + parenthesized-parameter-list: + begin: (\() + beginCaptures: + '0': { name: punctuation.parenthesis.open.cs } + end: (\)) + endCaptures: + '0': { name: punctuation.parenthesis.close.cs } + patterns: + - include: '#comment' + - include: '#attribute-section' + - name: storage.modifier.cs + match: \b(ref|params|out)\b + # parameter name + - match: \s+([_$[:alpha:]][_$[:alnum:]]*)\s*(?=[,)]) + captures: + '1': { name: variable.parameter.cs } + - include: '#variable-initializer' + - include: '#type' + - include: '#punctuation-comma' + + type: + name: meta.type.cs + patterns: + - include: '#comment' + - include: '#tuple-type' + - include: '#type-builtin' + - include: '#type-name' + - include: '#type-parameters' + - include: '#type-array-suffix' + + tuple-type: + patterns: + - begin: \( + beginCaptures: + '0': { name: punctuation.parenthesis.open.cs } + end: \) + 
endCaptures: + '0': { name: punctuation.parenthesis.close.cs } + patterns: + - include: '#tuple-element' + - include: '#punctuation-comma' + + tuple-element: + match: |- + (?x) + (?(?: + (?:(?[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? + (?: + (?:\g(?:\s*\.\s*\g)*) + (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? + (?:(?:\*)*)? + (?:(?:\[,*\])*)? + (?:\s*\.\s*\g)* + )| + (?:\s*\(\s*(?:\g)(?:\s+\g)?(?:\s*,\s*(?:\g)(?:\s+\g)?)*\s*\)\s*)) + (?:(?:\[,*\])*)? + ) + (?:\s+(?\g))? + captures: + '1': + patterns: + - include: '#type' + # '2': ? is a sub-expression. It's final value is not considered. + '3': { name: entity.name.variable.tuple.cs } + + type-builtin: + match: \b(bool|byte|char|decimal|double|float|int|long|object|sbyte|short|string|uint|ulong|ushort)\b + captures: + '1': { name: storage.type.cs } + + type-name: + patterns: + - match: ([_$[:alpha:]][_$[:alnum:]]*)\s*(\:\:) + captures: + '1': { name: entity.name.type.alias.cs } + '2': { name: punctuation.separator.coloncolon.cs } + - match: ([_$[:alpha:]][_$[:alnum:]]*)\s*(\.) + captures: + '1': { name: storage.type.cs } + '2': { name: punctuation.accessor.cs } + - match: (\.)\s*([_$[:alpha:]][_$[:alnum:]]*) + captures: + '1': { name: punctuation.accessor.cs } + '2': { name: storage.type.cs } + - name: storage.type.cs + match: '[_$[:alpha:]][_$[:alnum:]]*' + + type-parameters: + name: meta.type.parameters.cs + begin: '<' + beginCaptures: + '0': { name: punctuation.definition.typeparameters.begin.cs } + end: '>' + endCaptures: + '0': { name: punctuation.definition.typeparameters.end.cs } + patterns: + - include: '#comment' + - include: '#type' + - include: '#punctuation-comma' + + type-array-suffix: + begin: \[ + beginCaptures: + '0': { name: punctuation.squarebracket.open.cs } + end: \] + endCaptures: + '0': { name: punctuation.squarebracket.close.cs } + patterns: + - include: '#punctuation-comma' + + operator-assignment: + name: keyword.operator.assignment.cs + match: (? 
Date: Fri, 6 Jan 2017 11:06:09 -0800 Subject: [PATCH 058/192] Share match for 'base types' --- syntaxes/csharp.tmLanguage | 101 +++++++++++---------------------- syntaxes/csharp.tmLanguage.yml | 33 ++++------- 2 files changed, 44 insertions(+), 90 deletions(-) diff --git a/syntaxes/csharp.tmLanguage b/syntaxes/csharp.tmLanguage index e23bf097e0..2d8d3200e6 100644 --- a/syntaxes/csharp.tmLanguage +++ b/syntaxes/csharp.tmLanguage @@ -657,29 +657,8 @@ #generic-constraints - begin - : - beginCaptures - - 0 - - name - punctuation.separator.colon.cs - - - end - (?=\{|where) - patterns - - - include - #type - - - include - #punctuation-comma - - + include + #base-types @@ -714,6 +693,32 @@ + base-types + + begin + : + beginCaptures + + 0 + + name + punctuation.separator.colon.cs + + + end + (?=\{|where) + patterns + + + include + #type + + + include + #punctuation-comma + + + delegate-declaration begin @@ -977,29 +982,8 @@ #generic-constraints - begin - : - beginCaptures - - 0 - - name - punctuation.separator.colon.cs - - - end - (?=\{|where) - patterns - - - include - #type - - - include - #punctuation-comma - - + include + #base-types @@ -1086,29 +1070,8 @@ #generic-constraints - begin - : - beginCaptures - - 0 - - name - punctuation.separator.colon.cs - - - end - (?=\{|where) - patterns - - - include - #type - - - include - #punctuation-comma - - + include + #base-types diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index 7488f18c60..d0f758e5c0 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -202,13 +202,7 @@ repository: '1': { name: keyword.other.class.cs } '2': { name: entity.name.type.class.cs } - include: '#generic-constraints' - - begin: ':' - beginCaptures: - '0': { name: punctuation.separator.colon.cs } - end: (?=\{|where) - patterns: - - include: '#type' - - include: '#punctuation-comma' + - include: '#base-types' - begin: \{ beginCaptures: '0': { name: punctuation.curlybrace.open.cs } @@ -218,6 
+212,15 @@ repository: patterns: - include: '#class-members' + base-types: + begin: ':' + beginCaptures: + '0': { name: punctuation.separator.colon.cs } + end: (?=\{|where) + patterns: + - include: '#type' + - include: '#punctuation-comma' + delegate-declaration: begin: |- (?x) @@ -319,13 +322,7 @@ repository: '1': { name: keyword.other.interface.cs } '2': { name: entity.name.type.interface.cs } - include: '#generic-constraints' - - begin: ':' - beginCaptures: - '0': { name: punctuation.separator.colon.cs } - end: (?=\{|where) - patterns: - - include: '#type' - - include: '#punctuation-comma' + - include: '#base-types' - begin: \{ beginCaptures: '0': { name: punctuation.curlybrace.open.cs } @@ -357,13 +354,7 @@ repository: '1': { name: keyword.other.struct.cs } '2': { name: entity.name.type.struct.cs } - include: '#generic-constraints' - - begin: ':' - beginCaptures: - '0': { name: punctuation.separator.colon.cs } - end: (?=\{|where) - patterns: - - include: '#type' - - include: '#punctuation-comma' + - include: '#base-types' - begin: \{ beginCaptures: '0': { name: punctuation.curlybrace.open.cs } From da284d39e158c35b3f21b0b37d33f122d54cfc8d Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Fri, 6 Jan 2017 11:09:42 -0800 Subject: [PATCH 059/192] Rename build.ts -> buildSyntax.ts and update npm commands --- build.ts => buildSyntax.ts | 0 package.json | 6 +++--- 2 files changed, 3 insertions(+), 3 deletions(-) rename build.ts => buildSyntax.ts (100%) diff --git a/build.ts b/buildSyntax.ts similarity index 100% rename from build.ts rename to buildSyntax.ts diff --git a/package.json b/package.json index 112a6e80d4..d07987caba 100644 --- a/package.json +++ b/package.json @@ -24,10 +24,10 @@ "main": "./out/src/main", "scripts": { "vscode:prepublish": "tsc -p ./", - "compile": "tsc -p ./ && gulp tslint", + "compile": "tsc -p ./ && gulp tslint && node ./out/buildSyntax.js", "watch": "tsc -watch -p ./", - "test": "node ./node_modules/vscode/bin/test", - "build-syntax": 
"node ./out/build.js", + "test": "node ./node_modules/vscode/bin/test && mocha --timeout 15000 --ui bdd ./out/test/syntaxes/*.test.syntax.js", + "build-syntax": "node ./out/buildSyntax.js", "test-syntax": "mocha --timeout 15000 --ui bdd ./out/test/syntaxes/*.test.syntax.js", "postinstall": "node ./node_modules/vscode/bin/install" }, From dc293b513f82d68889694fe36006feb2cf4d4b3a Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Fri, 6 Jan 2017 11:29:57 -0800 Subject: [PATCH 060/192] Add support for constructor initializers --- syntaxes/csharp.tmLanguage | 171 ++++++++++++++++++---- syntaxes/csharp.tmLanguage.yml | 58 ++++++-- syntaxes/syntax.md | 3 - test/syntaxes/constructors.test.syntax.ts | 79 ++++++++++ test/syntaxes/utils/tokenize.ts | 1 + 5 files changed, 272 insertions(+), 40 deletions(-) diff --git a/syntaxes/csharp.tmLanguage b/syntaxes/csharp.tmLanguage index 2d8d3200e6..b4ffffb0b9 100644 --- a/syntaxes/csharp.tmLanguage +++ b/syntaxes/csharp.tmLanguage @@ -693,32 +693,6 @@ - base-types - - begin - : - beginCaptures - - 0 - - name - punctuation.separator.colon.cs - - - end - (?=\{|where) - patterns - - - include - #type - - - include - #punctuation-comma - - - delegate-declaration begin @@ -1106,6 +1080,32 @@ + base-types + + begin + : + beginCaptures + + 0 + + name + punctuation.separator.colon.cs + + + end + (?=\{|where) + patterns + + + include + #type + + + include + #punctuation-comma + + + generic-constraints begin @@ -1829,12 +1829,48 @@ include #expression-body + + include + #constructor-initializer + include #block + constructor-initializer + + begin + (:)\s*\b(?:(base)|(this))\b\s*(?=\() + beginCaptures + + 1 + + name + punctuation.separator.colon.cs + + 2 + + name + keyword.other.base.cs + + 3 + + name + keyword.other.this.cs + + + end + (?=\{|;) + patterns + + + include + #argument-list + + + destructor-declaration begin @@ -2586,7 +2622,7 @@ include - #parenthesized-parameter-list + #argument-list @@ -2727,6 +2763,87 @@ + argument-list + + 
begin + \( + beginCaptures + + 0 + + name + punctuation.parenthesis.open.cs + + + end + \) + endCaptures + + 0 + + name + punctuation.parenthesis.close.cs + + + patterns + + + include + #named-argument + + + include + #argument + + + include + #punctuation-comma + + + + named-argument + + begin + ([_$[:alpha:]][_$[:alnum:]]*)\s*(:) + beginCaptures + + 1 + + name + variable.parameter.cs + + 2 + + name + punctuation.separator.colon.cs + + + end + (?=(,|\))) + patterns + + + include + #expression + + + + argument + + patterns + + + name + storage.modifier.cs + match + \b(ref|out)\b + + + include + #expression + + + type name diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index d0f758e5c0..6b836330dc 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -212,15 +212,6 @@ repository: patterns: - include: '#class-members' - base-types: - begin: ':' - beginCaptures: - '0': { name: punctuation.separator.colon.cs } - end: (?=\{|where) - patterns: - - include: '#type' - - include: '#punctuation-comma' - delegate-declaration: begin: |- (?x) @@ -364,6 +355,15 @@ repository: patterns: - include: '#struct-members' + base-types: + begin: ':' + beginCaptures: + '0': { name: punctuation.separator.colon.cs } + end: (?=\{|where) + patterns: + - include: '#type' + - include: '#punctuation-comma' + generic-constraints: begin: (where)\s+([_$[:alpha:]][_$[:alnum:]]*)\s*(:) beginCaptures: @@ -672,8 +672,19 @@ repository: - include: '#comment' - include: '#parenthesized-parameter-list' - include: '#expression-body' + - include: '#constructor-initializer' - include: '#block' + constructor-initializer: + begin: (:)\s*\b(?:(base)|(this))\b\s*(?=\() + beginCaptures: + '1': { name: punctuation.separator.colon.cs } + '2': { name: keyword.other.base.cs } + '3': { name: keyword.other.this.cs } + end: (?=\{|;) + patterns: + - include: "#argument-list" + destructor-declaration: begin: (~)([_$[:alpha:]][_$[:alnum:]]*)\s*(?=\() beginCaptures: 
@@ -968,7 +979,7 @@ repository: - include: '#type' end: (?<=\)) patterns: - - include: '#parenthesized-parameter-list' + - include: '#argument-list' bracketed-parameter-list: begin: (?=(\[)) @@ -1014,6 +1025,33 @@ repository: - include: '#type' - include: '#punctuation-comma' + argument-list: + begin: \( + beginCaptures: + '0': { name: punctuation.parenthesis.open.cs } + end: \) + endCaptures: + '0': { name: punctuation.parenthesis.close.cs } + patterns: + - include: '#named-argument' + - include: '#argument' + - include: '#punctuation-comma' + + named-argument: + begin: ([_$[:alpha:]][_$[:alnum:]]*)\s*(:) + beginCaptures: + '1': { name: variable.parameter.cs } + '2': { name: punctuation.separator.colon.cs } + end: (?=(,|\))) + patterns: + - include: '#expression' + + argument: + patterns: + - name: storage.modifier.cs + match: \b(ref|out)\b + - include: '#expression' + type: name: meta.type.cs patterns: diff --git a/syntaxes/syntax.md b/syntaxes/syntax.md index 94ebc81281..2f4936f147 100644 --- a/syntaxes/syntax.md +++ b/syntaxes/syntax.md @@ -1,8 +1,5 @@ ## TODO List: -* Declarations: - * Constructor initializers - * Statements/Expressions: * Local variable declarations * Method calls diff --git a/test/syntaxes/constructors.test.syntax.ts b/test/syntaxes/constructors.test.syntax.ts index 31e789d453..f3ca587ea4 100644 --- a/test/syntaxes/constructors.test.syntax.ts +++ b/test/syntaxes/constructors.test.syntax.ts @@ -77,5 +77,84 @@ TestClass(int x, int y) Token.Puncuation.OpenBrace, Token.Puncuation.CloseBrace]); }); + + it("instance constructor with 'this' initializer", () => { + + const input = Input.InClass(`TestClass() : this(42) { }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("TestClass"), + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Puncuation.Colon, + Token.Keywords.This, + Token.Puncuation.OpenParen, + Token.Literals.Numeric.Decimal("42"), + Token.Puncuation.CloseParen, + 
Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace]); + }); + + it("instance constructor with 'this' initializer with ref parameter", () => { + + const input = Input.InClass(`TestClass(int x) : this(ref x) { }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("TestClass"), + Token.Puncuation.OpenParen, + Token.Type("int"), + Token.Variables.Parameter("x"), + Token.Puncuation.CloseParen, + Token.Puncuation.Colon, + Token.Keywords.This, + Token.Puncuation.OpenParen, + Token.Keywords.Modifiers.Ref, + Token.Variables.ReadWrite("x"), + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace]); + }); + + it("instance constructor with 'this' initializer with named parameter", () => { + + const input = Input.InClass(`TestClass(int x) : this(y: x) { }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("TestClass"), + Token.Puncuation.OpenParen, + Token.Type("int"), + Token.Variables.Parameter("x"), + Token.Puncuation.CloseParen, + Token.Puncuation.Colon, + Token.Keywords.This, + Token.Puncuation.OpenParen, + Token.Variables.Parameter("y"), + Token.Puncuation.Colon, + Token.Variables.ReadWrite("x"), + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace]); + }); + + it("instance constructor with 'base' initializer", () => { + + const input = Input.InClass(`TestClass() : base(42) { }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("TestClass"), + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Puncuation.Colon, + Token.Keywords.Base, + Token.Puncuation.OpenParen, + Token.Literals.Numeric.Decimal("42"), + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace]); + }); }); }); \ No newline at end of file diff --git a/test/syntaxes/utils/tokenize.ts b/test/syntaxes/utils/tokenize.ts index 
ea89d2dfa7..ba9c9b7c4e 100644 --- a/test/syntaxes/utils/tokenize.ts +++ b/test/syntaxes/utils/tokenize.ts @@ -182,6 +182,7 @@ export namespace Token { export const Add = createToken('add', 'keyword.other.add.cs'); export const Alias = createToken('alias', 'keyword.other.alias.cs'); export const AttributeSpecifier = (text: string) => createToken(text, 'keyword.other.attribute-specifier.cs'); + export const Base = createToken('base', 'keyword.other.base.cs'); export const Class = createToken('class', 'keyword.other.class.cs'); export const Delegate = createToken('delegate', 'keyword.other.delegate.cs'); export const Do = createToken('do', 'keyword.control.loop.cs'); From ca07e826c38879d709147a1cd03d2c7de075a23f Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Fri, 6 Jan 2017 11:36:47 -0800 Subject: [PATCH 061/192] Add support for labeled statements --- syntaxes/csharp.tmLanguage | 22 ++++++++++++++++++++++ syntaxes/csharp.tmLanguage.yml | 10 ++++++++++ test/syntaxes/labels.test.syntax.ts | 23 +++++++++++++++++++++++ test/syntaxes/utils/tokenize.ts | 1 + 4 files changed, 56 insertions(+) create mode 100644 test/syntaxes/labels.test.syntax.ts diff --git a/syntaxes/csharp.tmLanguage b/syntaxes/csharp.tmLanguage index b4ffffb0b9..66ecc902bc 100644 --- a/syntaxes/csharp.tmLanguage +++ b/syntaxes/csharp.tmLanguage @@ -245,6 +245,10 @@ include #control-statement + + include + #labeled-statement + include #expression @@ -2193,6 +2197,24 @@ + labeled-statement + + match + ([_$[:alpha:]][_$[:alnum:]]*)\s*(:) + captures + + 1 + + name + entity.name.label.cs + + 2 + + name + punctuation.separator.colon.cs + + + interpolated-string name diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index 6b836330dc..1bff3d73f8 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -5,6 +5,9 @@ scopeName: source.cs fileTypes: [cs] uuid: f7de61e2-bdde-4e2a-a139-8221b179584e +# Important patterns: +# Identifier: 
[_$[:alpha:]][_$[:alnum:]]* + patterns: - include: '#comment' - include: '#directives' @@ -75,6 +78,7 @@ repository: patterns: - include: '#comment' - include: '#control-statement' + - include: '#labeled-statement' - include: '#expression' - include: '#block' - include: '#punctuation-semicolon' @@ -812,6 +816,12 @@ repository: - name: keyword.control.flow.cs match: (? { + before(() => should()); + + describe("Labels", () => { + it("declaration", () => { + const input = Input.InMethod(`Foo:`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.LabelName("Foo"), + Token.Puncuation.Colon + ]); + }); + }); +}); \ No newline at end of file diff --git a/test/syntaxes/utils/tokenize.ts b/test/syntaxes/utils/tokenize.ts index ba9c9b7c4e..1d7ff55e8d 100644 --- a/test/syntaxes/utils/tokenize.ts +++ b/test/syntaxes/utils/tokenize.ts @@ -155,6 +155,7 @@ export namespace Token { export const EventName = (text: string) => createToken(text, 'entity.name.variable.event.cs'); export const FieldName = (text: string) => createToken(text, 'entity.name.variable.field.cs'); export const InterfaceName = (text: string) => createToken(text, 'entity.name.type.interface.cs'); + export const LabelName = (text: string) => createToken(text, 'entity.name.label.cs'); export const MethodName = (text: string) => createToken(text, 'entity.name.function.cs'); export const NamespaceName = (text: string) => createToken(text, 'entity.name.type.namespace.cs'); export const PropertyName = (text: string) => createToken(text, 'entity.name.variable.property.cs'); From 9fbf4445f3e7e7dc4890f2fc317df8bd66a9ed55 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Fri, 6 Jan 2017 11:46:56 -0800 Subject: [PATCH 062/192] Add support for local variable declarations --- syntaxes/csharp.tmLanguage | 68 ++++++++++++++++++++++++++++- syntaxes/csharp.tmLanguage.yml | 35 ++++++++++++++- syntaxes/syntax.md | 1 - test/syntaxes/locals.test.syntax.ts | 67 ++++++++++++++++++++++++++++ 
test/syntaxes/utils/tokenize.ts | 1 + 5 files changed, 167 insertions(+), 5 deletions(-) create mode 100644 test/syntaxes/locals.test.syntax.ts diff --git a/syntaxes/csharp.tmLanguage b/syntaxes/csharp.tmLanguage index 66ecc902bc..8645992690 100644 --- a/syntaxes/csharp.tmLanguage +++ b/syntaxes/csharp.tmLanguage @@ -241,13 +241,17 @@ include #comment + + include + #labeled-statement + include #control-statement include - #labeled-statement + #local-declaration include @@ -1202,7 +1206,7 @@ (?:(?:\[,*\])*)? )\s+ (?<first-field-name>\g<identifier>)\s* -(?!=>|==)(?=;|=) +(?!=>|==)(?=,|;|=) beginCaptures 1 @@ -2215,6 +2219,66 @@ + local-declaration + + begin + (?x) +(?<type-name>(?: + (?:(?<identifier>[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? + (?: + (?:\g<identifier>(?:\s*\.\s*\g<identifier>)*) + (?:\s*<\s*(?:\g<type-name>)(?:\s*,\s*\g<type-name>)*\s*>\s*)? + (?:(?:\*)*)? + (?:(?:\[,*\])*)? + (?:\s*\.\s*\g<type-name>)* + )| + (?:\s*\(\s*(?:\g<type-name>)(?:\s+\g<identifier>)?(?:\s*,\s*(?:\g<type-name>)(?:\s+\g<identifier>)?)*\s*\)\s*)) + (?:(?:\[,*\])*)? +)\s+ +(?<first-local-name>\g<identifier>)\s* +(?=,|;|=) + beginCaptures + + 1 + + patterns + + + include + #type + + + + 2 + + name + variable.local.cs + + + end + (?=;) + patterns + + + name + variable.local.cs + match + [_$[:alpha:]][_$[:alnum:]]* + + + include + #punctuation-comma + + + include + #comment + + + include + #variable-initializer + + + interpolated-string name diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index 1bff3d73f8..dcf6ebd1ac 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -77,8 +77,9 @@ repository: statement: patterns: - include: '#comment' - - include: '#control-statement' - include: '#labeled-statement' + - include: '#control-statement' + - include: '#local-declaration' - include: '#expression' - include: '#block' - include: '#punctuation-semicolon' @@ -406,7 +407,7 @@ repository: (?:(?:\[,*\])*)? 
)\s+ (?\g)\s* - (?!=>|==)(?=;|=) + (?!=>|==)(?=,|;|=) beginCaptures: '1': patterns: @@ -822,6 +823,36 @@ repository: '1': { name: entity.name.label.cs } '2': { name: punctuation.separator.colon.cs } + local-declaration: + begin: |- + (?x) + (?(?: + (?:(?[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? + (?: + (?:\g(?:\s*\.\s*\g)*) + (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? + (?:(?:\*)*)? + (?:(?:\[,*\])*)? + (?:\s*\.\s*\g)* + )| + (?:\s*\(\s*(?:\g)(?:\s+\g)?(?:\s*,\s*(?:\g)(?:\s+\g)?)*\s*\)\s*)) + (?:(?:\[,*\])*)? + )\s+ + (?\g)\s* + (?=,|;|=) + beginCaptures: + '1': + patterns: + - include: '#type' + '2': { name: variable.local.cs } + end: (?=;) + patterns: + - name: variable.local.cs + match: '[_$[:alpha:]][_$[:alnum:]]*' + - include: '#punctuation-comma' + - include: '#comment' + - include: '#variable-initializer' + interpolated-string: name: string.quoted.double.cs begin: '\$"' diff --git a/syntaxes/syntax.md b/syntaxes/syntax.md index 2f4936f147..341a54fd47 100644 --- a/syntaxes/syntax.md +++ b/syntaxes/syntax.md @@ -1,7 +1,6 @@ ## TODO List: * Statements/Expressions: - * Local variable declarations * Method calls * Element access * query expressions diff --git a/test/syntaxes/locals.test.syntax.ts b/test/syntaxes/locals.test.syntax.ts new file mode 100644 index 0000000000..aa51a90b2d --- /dev/null +++ b/test/syntaxes/locals.test.syntax.ts @@ -0,0 +1,67 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. 
+ *--------------------------------------------------------------------------------------------*/ + +import { should } from 'chai'; +import { tokenize, Input, Token } from './utils/tokenize'; + +describe("Grammar", () => { + before(() => should()); + + describe("Locals", () => { + it("declaration", () => { + const input = Input.InMethod(`int x;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("int"), + Token.Variables.Local("x"), + Token.Puncuation.Semicolon + ]); + }); + + it("declaration with initializer", () => { + const input = Input.InMethod(`int x = 42;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("int"), + Token.Variables.Local("x"), + Token.Operators.Assignment, + Token.Literals.Numeric.Decimal("42"), + Token.Puncuation.Semicolon + ]); + }); + + it("multiple declarators", () => { + const input = Input.InMethod(`int x, y;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("int"), + Token.Variables.Local("x"), + Token.Puncuation.Comma, + Token.Variables.Local("y"), + Token.Puncuation.Semicolon + ]); + }); + + it("multiple declarators with initializers", () => { + const input = Input.InMethod(`int x = 19, y = 23;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("int"), + Token.Variables.Local("x"), + Token.Operators.Assignment, + Token.Literals.Numeric.Decimal("19"), + Token.Puncuation.Comma, + Token.Variables.Local("y"), + Token.Operators.Assignment, + Token.Literals.Numeric.Decimal("23"), + Token.Puncuation.Semicolon + ]); + }); + }); +}); \ No newline at end of file diff --git a/test/syntaxes/utils/tokenize.ts b/test/syntaxes/utils/tokenize.ts index 1d7ff55e8d..fabdd4497e 100644 --- a/test/syntaxes/utils/tokenize.ts +++ b/test/syntaxes/utils/tokenize.ts @@ -310,6 +310,7 @@ export namespace Token { export namespace Variables { export const Alias = (text: string) => createToken(text, 'variable.other.alias.cs'); export const 
EnumMember = (text: string) => createToken(text, 'variable.other.enummember.cs'); + export const Local = (text: string) => createToken(text, 'variable.local.cs'); export const Parameter = (text: string) => createToken(text, 'variable.parameter.cs'); export const ReadWrite = (text: string) => createToken(text, 'variable.other.readwrite.cs'); export const Tuple = (text: string) => createToken(text, 'entity.name.variable.tuple.cs'); From db7436050a1fa762a165b5c6cb2251412c5fec77 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Fri, 6 Jan 2017 11:54:26 -0800 Subject: [PATCH 063/192] Add support for local constant declarations --- syntaxes/csharp.tmLanguage | 80 +++++++++++++++++++++++++++++ syntaxes/csharp.tmLanguage.yml | 37 +++++++++++++ test/syntaxes/locals.test.syntax.ts | 32 ++++++++++++ 3 files changed, 149 insertions(+) diff --git a/syntaxes/csharp.tmLanguage b/syntaxes/csharp.tmLanguage index 8645992690..0da5564995 100644 --- a/syntaxes/csharp.tmLanguage +++ b/syntaxes/csharp.tmLanguage @@ -2220,6 +2220,20 @@ local-declaration + + patterns + + + include + #local-constant-declaration + + + include + #local-variable-declaration + + + + local-variable-declaration begin (?x) @@ -2279,6 +2293,72 @@ + local-constant-declaration + + begin + (?x) +(?<const-keyword>\b(?:const)\b)\s* +(?<type-name>(?: + (?:(?<identifier>[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? + (?: + (?:\g<identifier>(?:\s*\.\s*\g<identifier>)*) + (?:\s*<\s*(?:\g<type-name>)(?:\s*,\s*\g<type-name>)*\s*>\s*)? + (?:(?:\*)*)? + (?:(?:\[,*\])*)? + (?:\s*\.\s*\g<type-name>)* + )| + (?:\s*\(\s*(?:\g<type-name>)(?:\s+\g<identifier>)?(?:\s*,\s*(?:\g<type-name>)(?:\s+\g<identifier>)?)*\s*\)\s*)) + (?:(?:\[,*\])*)? 
+)\s+ +(?<first-local-name>\g<identifier>)\s* +(?=,|;|=) + beginCaptures + + 1 + + name + storage.modifier.cs + + 2 + + patterns + + + include + #type + + + + 3 + + name + variable.local.cs + + + end + (?=;) + patterns + + + name + variable.local.cs + match + [_$[:alpha:]][_$[:alnum:]]* + + + include + #punctuation-comma + + + include + #comment + + + include + #variable-initializer + + + interpolated-string name diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index dcf6ebd1ac..933072837d 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -824,6 +824,11 @@ repository: '2': { name: punctuation.separator.colon.cs } local-declaration: + patterns: + - include: '#local-constant-declaration' + - include: '#local-variable-declaration' + + local-variable-declaration: begin: |- (?x) (?(?: @@ -853,6 +858,38 @@ repository: - include: '#comment' - include: '#variable-initializer' + local-constant-declaration: + begin: |- + (?x) + (?\b(?:const)\b)\s* + (?(?: + (?:(?[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? + (?: + (?:\g(?:\s*\.\s*\g)*) + (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? + (?:(?:\*)*)? + (?:(?:\[,*\])*)? + (?:\s*\.\s*\g)* + )| + (?:\s*\(\s*(?:\g)(?:\s+\g)?(?:\s*,\s*(?:\g)(?:\s+\g)?)*\s*\)\s*)) + (?:(?:\[,*\])*)? 
+ )\s+ + (?\g)\s* + (?=,|;|=) + beginCaptures: + '1': { name: storage.modifier.cs } + '2': + patterns: + - include: '#type' + '3': { name: variable.local.cs } + end: (?=;) + patterns: + - name: variable.local.cs + match: '[_$[:alpha:]][_$[:alnum:]]*' + - include: '#punctuation-comma' + - include: '#comment' + - include: '#variable-initializer' + interpolated-string: name: string.quoted.double.cs begin: '\$"' diff --git a/test/syntaxes/locals.test.syntax.ts b/test/syntaxes/locals.test.syntax.ts index aa51a90b2d..67c958cd20 100644 --- a/test/syntaxes/locals.test.syntax.ts +++ b/test/syntaxes/locals.test.syntax.ts @@ -63,5 +63,37 @@ describe("Grammar", () => { Token.Puncuation.Semicolon ]); }); + + it("const declaration", () => { + const input = Input.InMethod(`const int x = 42;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Const, + Token.Type("int"), + Token.Variables.Local("x"), + Token.Operators.Assignment, + Token.Literals.Numeric.Decimal("42"), + Token.Puncuation.Semicolon + ]); + }); + + it("const with multiple declarators", () => { + const input = Input.InMethod(`const int x = 19, y = 23;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Const, + Token.Type("int"), + Token.Variables.Local("x"), + Token.Operators.Assignment, + Token.Literals.Numeric.Decimal("19"), + Token.Puncuation.Comma, + Token.Variables.Local("y"), + Token.Operators.Assignment, + Token.Literals.Numeric.Decimal("23"), + Token.Puncuation.Semicolon + ]); + }); }); }); \ No newline at end of file From 1999df60dce6aa00e54df2c7c6167f23bec4045b Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Fri, 6 Jan 2017 12:11:59 -0800 Subject: [PATCH 064/192] Add support for if/else statements --- syntaxes/csharp.tmLanguage | 6 + syntaxes/csharp.tmLanguage.yml | 2 + syntaxes/syntax.md | 1 - .../selection-statements.test.syntax.ts | 214 ++++++++++++++++++ test/syntaxes/utils/tokenize.ts | 2 + 5 files changed, 224 
insertions(+), 1 deletion(-) create mode 100644 test/syntaxes/selection-statements.test.syntax.ts diff --git a/syntaxes/csharp.tmLanguage b/syntaxes/csharp.tmLanguage index 0da5564995..e0de046f7f 100644 --- a/syntaxes/csharp.tmLanguage +++ b/syntaxes/csharp.tmLanguage @@ -2199,6 +2199,12 @@ match (?<!\.)\b(return)\b + + name + keyword.control.conditional.cs + match + (?<!\.)\b(else|if)\b + labeled-statement diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index 933072837d..2947d05eb8 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -816,6 +816,8 @@ repository: match: (? { + before(() => should()); + + describe("Selection statements", () => { + it("single-line if with embedded statement", () => { + const input = Input.InMethod(`if (true) Do();`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.If, + Token.Puncuation.OpenParen, + Token.Literals.Boolean.True, + Token.Puncuation.CloseParen, + Token.Variables.ReadWrite("Do"), + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Puncuation.Semicolon + ]); + }); + + it("single-line if with block", () => { + const input = Input.InMethod(`if (true) { Do(); }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.If, + Token.Puncuation.OpenParen, + Token.Literals.Boolean.True, + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Variables.ReadWrite("Do"), + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace + ]); + }); + + it("if with embedded statement", () => { + const input = Input.InMethod(` +if (true) + Do(); +`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.If, + Token.Puncuation.OpenParen, + Token.Literals.Boolean.True, + Token.Puncuation.CloseParen, + Token.Variables.ReadWrite("Do"), + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + 
Token.Puncuation.Semicolon + ]); + }); + + it("if with block", () => { + const input = Input.InMethod(` +if (true) +{ + Do(); +}`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.If, + Token.Puncuation.OpenParen, + Token.Literals.Boolean.True, + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Variables.ReadWrite("Do"), + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace + ]); + }); + + it("if-else with embedded statements", () => { + const input = Input.InMethod(` +if (true) + Do(); +else + Dont(); +`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.If, + Token.Puncuation.OpenParen, + Token.Literals.Boolean.True, + Token.Puncuation.CloseParen, + Token.Variables.ReadWrite("Do"), + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Puncuation.Semicolon, + Token.Keywords.Else, + Token.Variables.ReadWrite("Dont"), + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Puncuation.Semicolon + ]); + }); + + it("if-else with blocks", () => { + const input = Input.InMethod(` +if (true) +{ + Do(); +} +else +{ + Dont(); +}`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.If, + Token.Puncuation.OpenParen, + Token.Literals.Boolean.True, + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Variables.ReadWrite("Do"), + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace, + Token.Keywords.Else, + Token.Puncuation.OpenBrace, + Token.Variables.ReadWrite("Dont"), + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace + ]); + }); + + it("if-elseif with embedded statements", () => { + const input = Input.InMethod(` +if (true) + Do(); +else if (false) + Dont(); +`); + const tokens = tokenize(input); + + 
tokens.should.deep.equal([ + Token.Keywords.If, + Token.Puncuation.OpenParen, + Token.Literals.Boolean.True, + Token.Puncuation.CloseParen, + Token.Variables.ReadWrite("Do"), + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Puncuation.Semicolon, + Token.Keywords.Else, + Token.Keywords.If, + Token.Puncuation.OpenParen, + Token.Literals.Boolean.False, + Token.Puncuation.CloseParen, + Token.Variables.ReadWrite("Dont"), + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Puncuation.Semicolon + ]); + }); + + it("if-elseif with blocks", () => { + const input = Input.InMethod(` +if (true) +{ + Do(); +} +else if (false) +{ + Dont(); +}`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.If, + Token.Puncuation.OpenParen, + Token.Literals.Boolean.True, + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Variables.ReadWrite("Do"), + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace, + Token.Keywords.Else, + Token.Keywords.If, + Token.Puncuation.OpenParen, + Token.Literals.Boolean.False, + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Variables.ReadWrite("Dont"), + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace + ]); + }); + }); +}); \ No newline at end of file diff --git a/test/syntaxes/utils/tokenize.ts b/test/syntaxes/utils/tokenize.ts index fabdd4497e..94367fa199 100644 --- a/test/syntaxes/utils/tokenize.ts +++ b/test/syntaxes/utils/tokenize.ts @@ -187,11 +187,13 @@ export namespace Token { export const Class = createToken('class', 'keyword.other.class.cs'); export const Delegate = createToken('delegate', 'keyword.other.delegate.cs'); export const Do = createToken('do', 'keyword.control.loop.cs'); + export const Else = createToken('else', 'keyword.control.conditional.cs'); export const Enum = createToken('enum', 
'keyword.other.enum.cs'); export const Event = createToken('event', 'keyword.other.event.cs'); export const Explicit = createToken('explicit', 'keyword.other.explicit.cs'); export const Extern = createToken('extern', 'keyword.other.extern.cs'); export const Get = createToken('get', 'keyword.other.get.cs'); + export const If = createToken('if', 'keyword.control.conditional.cs'); export const Implicit = createToken('implicit', 'keyword.other.implicit.cs'); export const Interface = createToken('interface', 'keyword.other.interface.cs'); export const Namespace = createToken('namespace', 'keyword.other.namespace.cs'); From f6818cfb82a4ca175ab7d330e52210a69e8ad1f9 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Fri, 6 Jan 2017 12:46:24 -0800 Subject: [PATCH 065/192] Add switch and goto statements --- syntaxes/csharp.tmLanguage | 207 +++++++++++++++++- syntaxes/csharp.tmLanguage.yml | 65 +++++- syntaxes/syntax.md | 3 +- .../selection-statements.test.syntax.ts | 39 ++++ test/syntaxes/utils/tokenize.ts | 10 + 5 files changed, 318 insertions(+), 6 deletions(-) diff --git a/syntaxes/csharp.tmLanguage b/syntaxes/csharp.tmLanguage index e0de046f7f..9d63016b91 100644 --- a/syntaxes/csharp.tmLanguage +++ b/syntaxes/csharp.tmLanguage @@ -243,12 +243,20 @@ include - #labeled-statement + #switch-statement + + + include + #goto-statement include #control-statement + + include + #labeled-statement + include #local-declaration @@ -2187,11 +2195,17 @@ patterns + + name + keyword.control.trycatch.cs + match + (?<!\.)\b(catch|finally|throw|try)\b + name keyword.control.loop.cs match - (?<!\.)\b(do|while)\b + (?<!\.)\b(break|continue|do|while)\b name @@ -2207,6 +2221,195 @@ + switch-statement + + begin + \b(switch)\b\s*(?=\() + beginCaptures + + 1 + + name + keyword.control.switch.cs + + + end + (?=\}) + patterns + + + begin + \( + beginCaptures + + 0 + + name + punctuation.parenthesis.open.cs + + + end + \) + endCaptures + + 0 + + name + punctuation.parenthesis.close.cs + + + 
patterns + + + include + #expression + + + + + begin + \{ + beginCaptures + + 0 + + name + punctuation.curlybrace.open.cs + + + end + \} + endCaptures + + 0 + + name + punctuation.curlybrace.close.cs + + + patterns + + + include + #switch-label + + + include + #statement + + + + + + switch-label + + patterns + + + begin + \b(case)\b\s+ + beginCaptures + + 1 + + name + keyword.control.case.cs + + + end + : + endCaptures + + 0 + + name + punctuation.separator.colon.cs + + + patterns + + + include + #expression + + + + + match + \b(default)\b\s*(:) + captures + + 1 + + name + keyword.control.default.cs + + 2 + + name + punctuation.separator.colon.cs + + + + + + goto-statement + + begin + \b(goto)\b + beginCaptures + + 1 + + name + keyword.control.loop.cs + + + end + (?=;) + patterns + + + begin + \b(case)\b + beginCaptures + + 1 + + name + keyword.control.case.cs + + + end + (?=;) + patterns + + + include + #expression + + + + + match + \b(default)\b + captures + + 1 + + name + keyword.control.default.cs + + + + + name + entity.name.label.cs + match + [_$[:alpha]][_$[:alnum:]]* + + + labeled-statement match diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index 2947d05eb8..867b058323 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -77,8 +77,10 @@ repository: statement: patterns: - include: '#comment' - - include: '#labeled-statement' + - include: '#switch-statement' + - include: '#goto-statement' - include: '#control-statement' + - include: '#labeled-statement' - include: '#local-declaration' - include: '#expression' - include: '#block' @@ -812,13 +814,72 @@ repository: control-statement: patterns: + - name: keyword.control.trycatch.cs + match: (? 
{ + const input = Input.InMethod(` +switch (i) { +case 0: + goto case 1; +case 1: + goto default; +default: + break; +}`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Switch, + Token.Puncuation.OpenParen, + Token.Variables.ReadWrite("i"), + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Keywords.Case, + Token.Literals.Numeric.Decimal("0"), + Token.Puncuation.Colon, + Token.Keywords.Goto, + Token.Keywords.Case, + Token.Literals.Numeric.Decimal("1"), + Token.Puncuation.Semicolon, + Token.Keywords.Case, + Token.Literals.Numeric.Decimal("1"), + Token.Puncuation.Colon, + Token.Keywords.Goto, + Token.Keywords.Default, + Token.Puncuation.Semicolon, + Token.Keywords.Default, + Token.Puncuation.Colon, + Token.Keywords.Break, + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace + ]); + }); }); }); \ No newline at end of file diff --git a/test/syntaxes/utils/tokenize.ts b/test/syntaxes/utils/tokenize.ts index 94367fa199..0dd548a2ca 100644 --- a/test/syntaxes/utils/tokenize.ts +++ b/test/syntaxes/utils/tokenize.ts @@ -184,7 +184,12 @@ export namespace Token { export const Alias = createToken('alias', 'keyword.other.alias.cs'); export const AttributeSpecifier = (text: string) => createToken(text, 'keyword.other.attribute-specifier.cs'); export const Base = createToken('base', 'keyword.other.base.cs'); + export const Break = createToken('break', 'keyword.control.loop.cs'); + export const Case = createToken('case', 'keyword.control.case.cs'); + export const Catch = createToken('catch', 'keyword.control.trycatch.cs'); export const Class = createToken('class', 'keyword.other.class.cs'); + export const Continue = createToken('continue', 'keyword.control.loop.cs'); + export const Default = createToken('default', 'keyword.control.default.cs'); export const Delegate = createToken('delegate', 'keyword.other.delegate.cs'); export const Do = createToken('do', 'keyword.control.loop.cs'); export const Else = 
createToken('else', 'keyword.control.conditional.cs'); @@ -192,7 +197,9 @@ export namespace Token { export const Event = createToken('event', 'keyword.other.event.cs'); export const Explicit = createToken('explicit', 'keyword.other.explicit.cs'); export const Extern = createToken('extern', 'keyword.other.extern.cs'); + export const Finally = createToken('finally', 'keyword.control.trycatch.cs'); export const Get = createToken('get', 'keyword.other.get.cs'); + export const Goto = createToken('goto', 'keyword.control.loop.cs'); export const If = createToken('if', 'keyword.control.conditional.cs'); export const Implicit = createToken('implicit', 'keyword.other.implicit.cs'); export const Interface = createToken('interface', 'keyword.other.interface.cs'); @@ -204,7 +211,10 @@ export namespace Token { export const Set = createToken('set', 'keyword.other.set.cs'); export const Static = createToken('static', 'keyword.other.static.cs'); export const Struct = createToken('struct', 'keyword.other.struct.cs'); + export const Switch = createToken('switch', 'keyword.control.switch.cs'); export const This = createToken('this', 'keyword.other.this.cs'); + export const Throw = createToken('throw', 'keyword.control.trycatch.cs'); + export const Try = createToken('try', 'keyword.control.trycatch.cs'); export const Using = createToken('using', 'keyword.other.using.cs'); export const Where = createToken('where', 'keyword.other.where.cs'); export const While = createToken('while', 'keyword.control.loop.cs'); From 8a1c309a5bac43271a398ecb49cc29502a9316e1 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Fri, 6 Jan 2017 12:50:23 -0800 Subject: [PATCH 066/192] Factor our return statements --- syntaxes/csharp.tmLanguage | 40 +++++++++++++++++++++++++--------- syntaxes/csharp.tmLanguage.yml | 19 +++++++++++----- 2 files changed, 43 insertions(+), 16 deletions(-) diff --git a/syntaxes/csharp.tmLanguage b/syntaxes/csharp.tmLanguage index 9d63016b91..d1976d6ebf 100644 --- 
a/syntaxes/csharp.tmLanguage +++ b/syntaxes/csharp.tmLanguage @@ -249,6 +249,10 @@ include #goto-statement + + include + #return-statement + include #control-statement @@ -2207,12 +2211,6 @@ match (?<!\.)\b(break|continue|do|while)\b - - name - keyword.control.flow.cs - match - (?<!\.)\b(return)\b - name keyword.control.conditional.cs @@ -2224,7 +2222,7 @@ switch-statement begin - \b(switch)\b\s*(?=\() + (?<!\.)\b(switch)\b\s*(?=\() beginCaptures 1 @@ -2307,7 +2305,7 @@ begin - \b(case)\b\s+ + (?<!\.)\b(case)\b\s+ beginCaptures 1 @@ -2336,7 +2334,7 @@ match - \b(default)\b\s*(:) + (?<!\.)\b(default)\b\s*(:) captures 1 @@ -2356,7 +2354,7 @@ goto-statement begin - \b(goto)\b + (?<!\.)\b(goto)\b beginCaptures 1 @@ -2410,6 +2408,28 @@ + return-statement + + begin + (?<!\.)\b(return)\b + beginCaptures + + 1 + + name + keyword.control.flow.cs + + + end + (?=;) + patterns + + + include + #expression + + + labeled-statement match diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index 867b058323..21599f76aa 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -79,6 +79,7 @@ repository: - include: '#comment' - include: '#switch-statement' - include: '#goto-statement' + - include: '#return-statement' - include: '#control-statement' - include: '#labeled-statement' - include: '#local-declaration' @@ -818,13 +819,11 @@ repository: match: (? 
Date: Fri, 6 Jan 2017 12:56:22 -0800 Subject: [PATCH 067/192] Factor out do and while statements --- syntaxes/csharp.tmLanguage | 85 ++++++++++++++++++++++++++++++++- syntaxes/csharp.tmLanguage.yml | 30 +++++++++++- test/syntaxes/utils/tokenize.ts | 6 +-- 3 files changed, 114 insertions(+), 7 deletions(-) diff --git a/syntaxes/csharp.tmLanguage b/syntaxes/csharp.tmLanguage index d1976d6ebf..e4597436d2 100644 --- a/syntaxes/csharp.tmLanguage +++ b/syntaxes/csharp.tmLanguage @@ -241,6 +241,14 @@ include #comment + + include + #while-statement + + + include + #do-statement + include #switch-statement @@ -2209,7 +2217,7 @@ name keyword.control.loop.cs match - (?<!\.)\b(break|continue|do|while)\b + (?<!\.)\b(break|continue)\b name @@ -2360,7 +2368,7 @@ 1 name - keyword.control.loop.cs + keyword.control.goto.cs end @@ -2430,6 +2438,79 @@ + do-statement + + begin + (?<!\.)\b(do)\b + beginCaptures + + 1 + + name + keyword.control.loop.do.cs + + + end + (?=;|}) + patterns + + + include + #statement + + + + while-statement + + begin + (?<!\.)\b(while)\b\s*(?=\() + beginCaptures + + 1 + + name + keyword.control.loop.while.cs + + + end + (?=;|}) + patterns + + + begin + \( + beginCaptures + + 0 + + name + punctuation.parenthesis.open.cs + + + end + \) + endCaptures + + 0 + + name + punctuation.parenthesis.close.cs + + + patterns + + + include + #expression + + + + + include + #statement + + + labeled-statement match diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index 21599f76aa..b8a43ca92d 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -77,6 +77,8 @@ repository: statement: patterns: - include: '#comment' + - include: '#while-statement' + - include: '#do-statement' - include: '#switch-statement' - include: '#goto-statement' - include: '#return-statement' @@ -818,7 +820,7 @@ repository: - name: keyword.control.trycatch.cs match: (? 
Date: Fri, 6 Jan 2017 13:21:34 -0800 Subject: [PATCH 068/192] Add support for for statements --- syntaxes/csharp.tmLanguage | 67 +++++++++++++++++++ syntaxes/csharp.tmLanguage.yml | 20 ++++++ .../iteration-statements.test.syntax.ts | 25 +++++++ test/syntaxes/utils/tokenize.ts | 1 + 4 files changed, 113 insertions(+) diff --git a/syntaxes/csharp.tmLanguage b/syntaxes/csharp.tmLanguage index e4597436d2..54bcdb37ae 100644 --- a/syntaxes/csharp.tmLanguage +++ b/syntaxes/csharp.tmLanguage @@ -249,6 +249,10 @@ include #do-statement + + include + #for-statement + include #switch-statement @@ -2511,6 +2515,69 @@ + for-statement + + begin + (?<!\.)\b(for)\b + beginCaptures + + 1 + + name + keyword.control.loop.for.cs + + + end + (?=\;|}) + patterns + + + begin + \( + beginCaptures + + 0 + + name + punctuation.parenthesis.open.cs + + + end + \) + endCaptures + + 0 + + name + punctuation.parenthesis.close.cs + + + patterns + + + include + #local-variable-declaration + + + include + #expression + + + include + #punctuation-comma + + + include + #punctuation-semicolon + + + + + include + #statement + + + labeled-statement match diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index b8a43ca92d..1e1e641d01 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -79,6 +79,7 @@ repository: - include: '#comment' - include: '#while-statement' - include: '#do-statement' + - include: '#for-statement' - include: '#switch-statement' - include: '#goto-statement' - include: '#return-statement' @@ -913,6 +914,25 @@ repository: - include: '#expression' - include: '#statement' + for-statement: + begin: (? 
{ Token.Puncuation.Semicolon ]); }); + + it("single-line for loop", () => { + + const input = Input.InMethod(`for (int i = 0; i < 42; i++) { }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.For, + Token.Puncuation.OpenParen, + Token.Type("int"), + Token.Variables.Local("i"), + Token.Operators.Assignment, + Token.Literals.Numeric.Decimal("0"), + Token.Puncuation.Semicolon, + Token.Variables.ReadWrite("i"), + Token.Operators.Relational.LessThan, + Token.Literals.Numeric.Decimal("42"), + Token.Puncuation.Semicolon, + Token.Variables.ReadWrite("i"), + Token.Operators.Increment, + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace, + ]); + }); }); }); \ No newline at end of file diff --git a/test/syntaxes/utils/tokenize.ts b/test/syntaxes/utils/tokenize.ts index 111bc94407..9afe574659 100644 --- a/test/syntaxes/utils/tokenize.ts +++ b/test/syntaxes/utils/tokenize.ts @@ -198,6 +198,7 @@ export namespace Token { export const Explicit = createToken('explicit', 'keyword.other.explicit.cs'); export const Extern = createToken('extern', 'keyword.other.extern.cs'); export const Finally = createToken('finally', 'keyword.control.trycatch.cs'); + export const For = createToken('for', 'keyword.control.loop.for.cs'); export const Get = createToken('get', 'keyword.other.get.cs'); export const Goto = createToken('goto', 'keyword.control.goto.cs'); export const If = createToken('if', 'keyword.control.conditional.cs'); From 58d763654a5c04f6afaf4d3804bdb3ce17710b55 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Fri, 6 Jan 2017 14:16:30 -0800 Subject: [PATCH 069/192] Add foreach statements --- syntaxes/csharp.tmLanguage | 96 +++++++++++++++++++ syntaxes/csharp.tmLanguage.yml | 40 ++++++++ syntaxes/syntax.md | 2 - .../iteration-statements.test.syntax.ts | 18 ++++ test/syntaxes/utils/tokenize.ts | 2 + 5 files changed, 156 insertions(+), 2 deletions(-) diff --git a/syntaxes/csharp.tmLanguage 
b/syntaxes/csharp.tmLanguage index 54bcdb37ae..e352b39d0f 100644 --- a/syntaxes/csharp.tmLanguage +++ b/syntaxes/csharp.tmLanguage @@ -253,6 +253,10 @@ include #for-statement + + include + #foreach-statement + include #switch-statement @@ -2578,6 +2582,98 @@ + foreach-statement + + begin + (?<!\.)\b(foreach)\b + beginCaptures + + 1 + + name + keyword.control.loop.foreach.cs + + + end + (?=\;|}) + patterns + + + begin + \( + beginCaptures + + 0 + + name + punctuation.parenthesis.open.cs + + + end + \) + endCaptures + + 0 + + name + punctuation.parenthesis.close.cs + + + patterns + + + match + (?x) +(?<type-name>(?: + (?:(?<identifier>[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? + (?: + (?:\g<identifier>(?:\s*\.\s*\g<identifier>)*) + (?:\s*<\s*(?:\g<type-name>)(?:\s*,\s*\g<type-name>)*\s*>\s*)? + (?:(?:\*)*)? + (?:(?:\[,*\])*)? + (?:\s*\.\s*\g<type-name>)* + )| + (?:\s*\(\s*(?:\g<type-name>)(?:\s+\g<identifier>)?(?:\s*,\s*(?:\g<type-name>)(?:\s+\g<identifier>)?)*\s*\)\s*)) + (?:(?:\[,*\])*)? +)\s+ +(?<local-name>\g<identifier>)\s+ +\b(?<in-keyword>in)\b + captures + + 1 + + patterns + + + include + #type + + + + 3 + + name + variable.local.cs + + 4 + + name + keyword.control.loop.in.cs + + + + + include + #expression + + + + + include + #statement + + + labeled-statement match diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index 1e1e641d01..4d95fa6849 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -80,6 +80,7 @@ repository: - include: '#while-statement' - include: '#do-statement' - include: '#for-statement' + - include: '#foreach-statement' - include: '#switch-statement' - include: '#goto-statement' - include: '#return-statement' @@ -933,6 +934,45 @@ repository: - include: '#punctuation-semicolon' - include: '#statement' + foreach-statement: + begin: (?(?: + (?:(?[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? + (?: + (?:\g(?:\s*\.\s*\g)*) + (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? + (?:(?:\*)*)? 
+ (?:(?:\[,*\])*)? + (?:\s*\.\s*\g)* + )| + (?:\s*\(\s*(?:\g)(?:\s+\g)?(?:\s*,\s*(?:\g)(?:\s+\g)?)*\s*\)\s*)) + (?:(?:\[,*\])*)? + )\s+ + (?\g)\s+ + \b(?in)\b + captures: + '1': + patterns: + - include: '#type' + # '2': ? is a sub-expression. It's final value is not considered. + '3': { name: variable.local.cs } + '4': { name: keyword.control.loop.in.cs } + - include: '#expression' + - include: '#statement' + labeled-statement: match: '([_$[:alpha:]][_$[:alnum:]]*)\s*(:)' captures: diff --git a/syntaxes/syntax.md b/syntaxes/syntax.md index e5d7b75a61..a05fbfa5ec 100644 --- a/syntaxes/syntax.md +++ b/syntaxes/syntax.md @@ -4,8 +4,6 @@ * Method calls * Element access * query expressions - * for loops - * foreach loops * lambda expressions and anonymous functions * array, collection and object initializers * casts diff --git a/test/syntaxes/iteration-statements.test.syntax.ts b/test/syntaxes/iteration-statements.test.syntax.ts index 4e0338c50d..6580f5a3c1 100644 --- a/test/syntaxes/iteration-statements.test.syntax.ts +++ b/test/syntaxes/iteration-statements.test.syntax.ts @@ -65,5 +65,23 @@ describe("Grammar", () => { Token.Puncuation.CloseBrace, ]); }); + + it("single-line foreach loop", () => { + + const input = Input.InMethod(`foreach (int i in numbers) { }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.ForEach, + Token.Puncuation.OpenParen, + Token.Type("int"), + Token.Variables.Local("i"), + Token.Keywords.In, + Token.Variables.ReadWrite("numbers"), + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace, + ]); + }); }); }); \ No newline at end of file diff --git a/test/syntaxes/utils/tokenize.ts b/test/syntaxes/utils/tokenize.ts index 9afe574659..eb2dc065be 100644 --- a/test/syntaxes/utils/tokenize.ts +++ b/test/syntaxes/utils/tokenize.ts @@ -199,10 +199,12 @@ export namespace Token { export const Extern = createToken('extern', 'keyword.other.extern.cs'); export const Finally = 
createToken('finally', 'keyword.control.trycatch.cs'); export const For = createToken('for', 'keyword.control.loop.for.cs'); + export const ForEach = createToken('foreach', 'keyword.control.loop.foreach.cs'); export const Get = createToken('get', 'keyword.other.get.cs'); export const Goto = createToken('goto', 'keyword.control.goto.cs'); export const If = createToken('if', 'keyword.control.conditional.cs'); export const Implicit = createToken('implicit', 'keyword.other.implicit.cs'); + export const In = createToken('in', 'keyword.control.loop.in.cs'); export const Interface = createToken('interface', 'keyword.other.interface.cs'); export const Namespace = createToken('namespace', 'keyword.other.namespace.cs'); export const New = createToken('new', 'keyword.other.new.cs'); From 586fd968e4badc7f89be7630f5477a8f19a66283 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Fri, 6 Jan 2017 14:47:00 -0800 Subject: [PATCH 070/192] Add support for try statements with finally and catch clauses --- syntaxes/csharp.tmLanguage | 187 +++++++++++++++++++- syntaxes/csharp.tmLanguage.yml | 70 +++++++- syntaxes/syntax.md | 1 - test/syntaxes/try-statements.test.syntax.ts | 177 ++++++++++++++++++ test/syntaxes/utils/tokenize.ts | 9 +- 5 files changed, 431 insertions(+), 13 deletions(-) create mode 100644 test/syntaxes/try-statements.test.syntax.ts diff --git a/syntaxes/csharp.tmLanguage b/syntaxes/csharp.tmLanguage index e352b39d0f..5fbd678e95 100644 --- a/syntaxes/csharp.tmLanguage +++ b/syntaxes/csharp.tmLanguage @@ -269,6 +269,10 @@ include #return-statement + + include + #try-statement + include #control-statement @@ -2215,12 +2219,6 @@ patterns - - name - keyword.control.trycatch.cs - match - (?<!\.)\b(catch|finally|throw|try)\b - name keyword.control.loop.cs @@ -2674,6 +2672,183 @@ + try-statement + + begin + (?<!\.)\b(try)\b + beginCaptures + + 1 + + name + keyword.control.try.cs + + + end + (?=\}) + patterns + + + include + #block + + + include + #catch-clause + + + include + 
#finally-clause + + + + finally-clause + + begin + (?<!\.)\b(finally)\b + beginCaptures + + 1 + + name + keyword.control.try.finally.cs + + + end + (?=\}) + patterns + + + include + #block + + + + catch-clause + + begin + (?<!\.)\b(catch)\b + beginCaptures + + 1 + + name + keyword.control.try.catch.cs + + + end + (?=\}) + patterns + + + begin + \( + beginCaptures + + 0 + + name + punctuation.parenthesis.open.cs + + + end + \) + endCaptures + + 0 + + name + punctuation.parenthesis.close.cs + + + patterns + + + match + (?x) +(?<type-name>(?: + (?:(?<identifier>[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? + (?: + (?:\g<identifier>(?:\s*\.\s*\g<identifier>)*) + (?:\s*<\s*(?:\g<type-name>)(?:\s*,\s*\g<type-name>)*\s*>\s*)? + (?:(?:\*)*)? + (?:(?:\[,*\])*)? + (?:\s*\.\s*\g<type-name>)* + )| + (?:\s*\(\s*(?:\g<type-name>)(?:\s+\g<identifier>)?(?:\s*,\s*(?:\g<type-name>)(?:\s+\g<identifier>)?)*\s*\)\s*)) + (?:(?:\[,*\])*)? +) +(?:\s+\b(?<local-name>\g<identifier>)\b)? + captures + + 1 + + patterns + + + include + #type + + + + 3 + + name + variable.local.cs + + + + + + + include + #when-clause + + + include + #block + + + include + #finally-clause + + + + when-clause + + begin + (?<!\.)\b(when)\b\s*(\() + beginCaptures + + 1 + + name + keyword.control.try.when.cs + + 2 + + name + punctuation.parenthesis.open.cs + + + end + \) + endCaptures + + 0 + + name + punctuation.parenthesis.close.cs + + + patterns + + + include + #expression + + + labeled-statement match diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index 4d95fa6849..e67402801c 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -84,6 +84,7 @@ repository: - include: '#switch-statement' - include: '#goto-statement' - include: '#return-statement' + - include: '#try-statement' - include: '#control-statement' - include: '#labeled-statement' - include: '#local-declaration' @@ -819,8 +820,6 @@ repository: control-statement: patterns: - - name: 
keyword.control.trycatch.cs - match: (?(?: + (?:(?[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? + (?: + (?:\g(?:\s*\.\s*\g)*) + (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? + (?:(?:\*)*)? + (?:(?:\[,*\])*)? + (?:\s*\.\s*\g)* + )| + (?:\s*\(\s*(?:\g)(?:\s+\g)?(?:\s*,\s*(?:\g)(?:\s+\g)?)*\s*\)\s*)) + (?:(?:\[,*\])*)? + ) + (?:\s+\b(?\g)\b)? + captures: + '1': + patterns: + - include: '#type' + # '2': ? is a sub-expression. It's final value is not considered. + '3': { name: variable.local.cs } + - include: '#when-clause' + - include: '#block' + - include: '#finally-clause' + + when-clause: + begin: (? { + before(() => should()); + + describe("Try statements", () => { + it("try-finally", () => { + const input = Input.InMethod(` +try +{ +} +finally +{ +}`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Try, + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace, + Token.Keywords.Finally, + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace + ]); + }); + + it("try-catch", () => { + const input = Input.InMethod(` +try +{ +} +catch +{ +}`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Try, + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace, + Token.Keywords.Catch, + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace + ]); + }); + + it("try-catch-finally", () => { + const input = Input.InMethod(` +try +{ +} +catch +{ +} +finally +{ +}`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Try, + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace, + Token.Keywords.Catch, + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace, + Token.Keywords.Finally, + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace + ]); + }); + + it("try-catch with exception type", () => { + const input = Input.InMethod(` +try +{ +} +catch (Exception) +{ +}`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Try, + 
Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace, + Token.Keywords.Catch, + Token.Puncuation.OpenParen, + Token.Type("Exception"), + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace + ]); + }); + + it("try-catch with exception type and identifier", () => { + const input = Input.InMethod(` +try +{ +} +catch (Exception ex) +{ +}`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Try, + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace, + Token.Keywords.Catch, + Token.Puncuation.OpenParen, + Token.Type("Exception"), + Token.Variables.Local("ex"), + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace + ]); + }); + + it("try-catch with exception filter", () => { + const input = Input.InMethod(` +try +{ +} +catch when (true) +{ +}`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Try, + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace, + Token.Keywords.Catch, + Token.Keywords.When, + Token.Puncuation.OpenParen, + Token.Literals.Boolean.True, + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace + ]); + }); + + it("try-catch with exception type and filter", () => { + const input = Input.InMethod(` +try +{ +} +catch (Exception) when (true) +{ +}`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Try, + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace, + Token.Keywords.Catch, + Token.Puncuation.OpenParen, + Token.Type("Exception"), + Token.Puncuation.CloseParen, + Token.Keywords.When, + Token.Puncuation.OpenParen, + Token.Literals.Boolean.True, + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace + ]); + }); + }); +}); \ No newline at end of file diff --git a/test/syntaxes/utils/tokenize.ts b/test/syntaxes/utils/tokenize.ts index eb2dc065be..25cdfe7a04 100644 --- 
a/test/syntaxes/utils/tokenize.ts +++ b/test/syntaxes/utils/tokenize.ts @@ -186,7 +186,7 @@ export namespace Token { export const Base = createToken('base', 'keyword.other.base.cs'); export const Break = createToken('break', 'keyword.control.loop.cs'); export const Case = createToken('case', 'keyword.control.case.cs'); - export const Catch = createToken('catch', 'keyword.control.trycatch.cs'); + export const Catch = createToken('catch', 'keyword.control.try.catch.cs'); export const Class = createToken('class', 'keyword.other.class.cs'); export const Continue = createToken('continue', 'keyword.control.loop.cs'); export const Default = createToken('default', 'keyword.control.default.cs'); @@ -197,7 +197,7 @@ export namespace Token { export const Event = createToken('event', 'keyword.other.event.cs'); export const Explicit = createToken('explicit', 'keyword.other.explicit.cs'); export const Extern = createToken('extern', 'keyword.other.extern.cs'); - export const Finally = createToken('finally', 'keyword.control.trycatch.cs'); + export const Finally = createToken('finally', 'keyword.control.try.finally.cs'); export const For = createToken('for', 'keyword.control.loop.for.cs'); export const ForEach = createToken('foreach', 'keyword.control.loop.foreach.cs'); export const Get = createToken('get', 'keyword.other.get.cs'); @@ -216,9 +216,10 @@ export namespace Token { export const Struct = createToken('struct', 'keyword.other.struct.cs'); export const Switch = createToken('switch', 'keyword.control.switch.cs'); export const This = createToken('this', 'keyword.other.this.cs'); - export const Throw = createToken('throw', 'keyword.control.trycatch.cs'); - export const Try = createToken('try', 'keyword.control.trycatch.cs'); + export const Throw = createToken('throw', 'keyword.control.throw.cs'); + export const Try = createToken('try', 'keyword.control.try.cs'); export const Using = createToken('using', 'keyword.other.using.cs'); + export const When = createToken('when', 
'keyword.control.try.when.cs'); export const Where = createToken('where', 'keyword.other.where.cs'); export const While = createToken('while', 'keyword.control.loop.while.cs'); } From 61c7d88f2de73947e70ad4bf1ea806c867d08fb3 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Fri, 6 Jan 2017 15:02:30 -0800 Subject: [PATCH 071/192] Remove syntaxes/csharp.tmLanguage since we build it --- .gitignore | 1 + syntaxes/csharp.tmLanguage | 4007 ------------------------------------ 2 files changed, 1 insertion(+), 4007 deletions(-) delete mode 100644 syntaxes/csharp.tmLanguage diff --git a/.gitignore b/.gitignore index 75442c2235..5c2a0dfe6b 100644 --- a/.gitignore +++ b/.gitignore @@ -6,5 +6,6 @@ out .vscode-test install.* +syntaxes/csharp.tmLanguage *.vsix diff --git a/syntaxes/csharp.tmLanguage b/syntaxes/csharp.tmLanguage deleted file mode 100644 index 5fbd678e95..0000000000 --- a/syntaxes/csharp.tmLanguage +++ /dev/null @@ -1,4007 +0,0 @@ - - - - - name - C# - scopeName - source.cs - fileTypes - - cs - - uuid - f7de61e2-bdde-4e2a-a139-8221b179584e - patterns - - - include - #comment - - - include - #directives - - - include - #declarations - - - repository - - directives - - patterns - - - include - #extern-alias-directive - - - include - #using-directive - - - include - #attribute-section - - - include - #punctuation-semicolon - - - - declarations - - patterns - - - include - #namespace-declaration - - - include - #type-declarations - - - include - #punctuation-semicolon - - - - type-declarations - - patterns - - - include - #class-declaration - - - include - #delegate-declaration - - - include - #enum-declaration - - - include - #interface-declaration - - - include - #struct-declaration - - - include - #punctuation-semicolon - - - - class-members - - patterns - - - include - #type-declarations - - - include - #event-declaration - - - include - #property-declaration - - - include - #indexer-declaration - - - include - #field-declaration - - - include - 
#variable-initializer - - - include - #method-declaration - - - include - #constructor-declaration - - - include - #destructor-declaration - - - include - #operator-declaration - - - include - #conversion-operator-declaration - - - include - #punctuation-semicolon - - - - struct-members - - patterns - - - include - #type-declarations - - - include - #event-declaration - - - include - #property-declaration - - - include - #indexer-declaration - - - include - #field-declaration - - - include - #variable-initializer - - - include - #method-declaration - - - include - #constructor-declaration - - - include - #destructor-declaration - - - include - #operator-declaration - - - include - #conversion-operator-declaration - - - include - #punctuation-semicolon - - - - interface-members - - patterns - - - include - #event-declaration - - - include - #property-declaration - - - include - #indexer-declaration - - - include - #method-declaration - - - include - #punctuation-semicolon - - - - statement - - patterns - - - include - #comment - - - include - #while-statement - - - include - #do-statement - - - include - #for-statement - - - include - #foreach-statement - - - include - #switch-statement - - - include - #goto-statement - - - include - #return-statement - - - include - #try-statement - - - include - #control-statement - - - include - #labeled-statement - - - include - #local-declaration - - - include - #expression - - - include - #block - - - include - #punctuation-semicolon - - - - expression - - patterns - - - include - #interpolated-string - - - include - #verbatim-interpolated-string - - - include - #literal - - - include - #expression-operators - - - include - #object-creation-expression - - - include - #parenthesized-expression - - - include - #identifier - - - - extern-alias-directive - - begin - \s*(extern)\b\s*(alias)\b\s*([_$[:alpha:]][_$[:alnum:]]*) - beginCaptures - - 1 - - name - keyword.other.extern.cs - - 2 - - name - keyword.other.alias.cs - - 3 - - 
name - variable.other.alias.cs - - - end - (?=;) - - using-directive - - patterns - - - begin - \b(using)\b\s+(static)\s+ - beginCaptures - - 1 - - name - keyword.other.using.cs - - 2 - - name - keyword.other.static.cs - - - end - (?=;) - patterns - - - include - #type - - - - - begin - \b(using)\s+(?=([_$[:alpha:]][_$[:alnum:]]*)\s*=) - beginCaptures - - 1 - - name - keyword.other.using.cs - - 2 - - name - entity.name.type.alias.cs - - - end - (?=;) - patterns - - - include - #comment - - - include - #type - - - include - #operator-assignment - - - - - begin - \b(using)\s* - beginCaptures - - 1 - - name - keyword.other.using.cs - - - end - (?=;) - patterns - - - include - #comment - - - name - entity.name.type.namespace.cs - match - [_$[:alpha:]][_$[:alnum:]]* - - - include - #operator-assignment - - - - - - attribute-section - - begin - (\[)(assembly|module|field|event|method|param|property|return|type)?(\:)? - beginCaptures - - 1 - - name - punctuation.squarebracket.open.cs - - 2 - - name - keyword.other.attribute-specifier.cs - - 3 - - name - punctuation.separator.colon.cs - - - end - (\]) - endCaptures - - 1 - - name - punctuation.squarebracket.close.cs - - - patterns - - - include - #comment - - - include - #attribute - - - include - #punctuation-comma - - - - attribute - - patterns - - - include - #type-name - - - include - #attribute-arguments - - - - attribute-arguments - - begin - (\() - beginCaptures - - 1 - - name - punctuation.parenthesis.open.cs - - - end - (\)) - endCaptures - - 1 - - name - punctuation.parenthesis.close.cs - - - patterns - - - include - #attribute-named-argument - - - include - #expression - - - include - #punctuation-comma - - - - attribute-named-argument - - begin - ([_$[:alpha:]][_$[:alnum:]]*)\s*(?==) - beginCaptures - - 1 - - name - entity.name.variable.property.cs - - - end - (?=(,|\))) - patterns - - - include - #operator-assignment - - - include - #expression - - - - namespace-declaration - - begin - \b(namespace)\s+ - 
beginCaptures - - 1 - - name - keyword.other.namespace.cs - - - end - (?<=\}) - patterns - - - include - #comment - - - name - entity.name.type.namespace.cs - match - [_$[:alpha:]][_$[:alnum:]]* - - - include - #punctuation-accessor - - - begin - \{ - beginCaptures - - 0 - - name - punctuation.curlybrace.open.cs - - - end - \} - endCaptures - - 0 - - name - punctuation.curlybrace.close.cs - - - patterns - - - include - #declarations - - - include - #using-directive - - - include - #punctuation-semicolon - - - - - - class-declaration - - begin - (?=(?:((new|public|protected|internal|private|abstract|sealed|static|partial)\s+)*)(?:class)\s+) - end - (?<=\}) - patterns - - - include - #comment - - - name - storage.modifier.cs - match - \b(new|public|protected|internal|private|abstract|sealed|static|partial)\b - - - begin - (?=class) - end - (?=\{) - patterns - - - match - (class)\s+([_$[:alpha:]][_$[:alnum:]]*(\s*<\s*(?:[_$[:alpha:]][_$[:alnum:]]*\s*,\s*)*(?:[_$[:alpha:]][_$[:alnum:]]*)\s*>)?) - captures - - 1 - - name - keyword.other.class.cs - - 2 - - name - entity.name.type.class.cs - - - - - include - #generic-constraints - - - include - #base-types - - - - - begin - \{ - beginCaptures - - 0 - - name - punctuation.curlybrace.open.cs - - - end - \} - endCaptures - - 0 - - name - punctuation.curlybrace.close.cs - - - patterns - - - include - #class-members - - - - - - delegate-declaration - - begin - (?x) -(?<storage-modifiers>(?:\b(?:new|public|protected|internal|private)\b\s+)*) -(?<delegate-keyword>(?:\b(?:delegate)\b))\s+ -(?<type-name>(?: - (?:(?<identifier>[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? - (?: - (?:\g<identifier>(?:\s*\.\s*\g<identifier>)*) - (?:\s*<\s*(?:\g<type-name>)(?:\s*,\s*\g<type-name>)*\s*>\s*)? - (?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g<type-name>)* - )| - (?:\s*\(\s*(?:\g<type-name>)(?:\s+\g<identifier>)?(?:\s*,\s*(?:\g<type-name>)(?:\s+\g<identifier>)?)*\s*\)\s*)) - (?:(?:\[,*\])*)? 
-)\s+ -(?<delegate-name> - (?: - \g<identifier> - (?:\s*<\s*(?:(?:(?:in|out)\s+)?\g<identifier>)(?:,\s*(?:(?:in|out)\s+)?\g<identifier>)*\s*>\s*)? - ) -)\s* -(?=\() - beginCaptures - - 1 - - patterns - - - match - \b(new|public|protected|internal|private)\b - captures - - 1 - - name - storage.modifier.cs - - - - - - 2 - - name - keyword.other.delegate.cs - - 3 - - patterns - - - include - #type - - - - 5 - - name - entity.name.type.delegate.cs - - - end - (?=;) - patterns - - - include - #comment - - - include - #parenthesized-parameter-list - - - include - #generic-constraints - - - - enum-declaration - - begin - (?=(?:((new|public|protected|internal|private)\s+)*)(?:enum)\s+) - end - (?<=\}) - patterns - - - include - #comment - - - name - storage.modifier.cs - match - \b(new|public|protected|internal|private)\b - - - begin - (?=enum) - end - (?=\{) - patterns - - - match - (enum)\s+([_$[:alpha:]][_$[:alnum:]]*) - captures - - 1 - - name - keyword.other.enum.cs - - 2 - - name - entity.name.type.enum.cs - - - - - begin - : - beginCaptures - - 0 - - name - punctuation.separator.colon.cs - - - end - (?=\{) - patterns - - - include - #type - - - - - - - begin - \{ - beginCaptures - - 0 - - name - punctuation.curlybrace.open.cs - - - end - \} - endCaptures - - 0 - - name - punctuation.curlybrace.close.cs - - - patterns - - - include - #comment - - - include - #attribute-section - - - include - #punctuation-comma - - - begin - [_$[:alpha:]][_$[:alnum:]]* - beginCaptures - - 0 - - name - variable.other.enummember.cs - - - end - (?=(,|\})) - patterns - - - include - #comment - - - include - #variable-initializer - - - - - - - - interface-declaration - - begin - (?=(?:((new|public|protected|internal|private|partial)\s+)*)(?:interface)\s+) - end - (?<=\}) - patterns - - - include - #comment - - - name - storage.modifier.cs - match - \b(new|public|protected|internal|private|partial)\b - - - begin - (?=interface) - end - (?=\{) - patterns - - - match - (?x) -(interface)\s+ 
-( - (?<identifier>[_$[:alpha:]][_$[:alnum:]]*) - (\s*<\s*(?:(?:(?:in|out)\s+)?\g<identifier>\s*,\s*)*(?:(?:in|out)\s+)?\g<identifier>\s*>)? -) - captures - - 1 - - name - keyword.other.interface.cs - - 2 - - name - entity.name.type.interface.cs - - - - - include - #generic-constraints - - - include - #base-types - - - - - begin - \{ - beginCaptures - - 0 - - name - punctuation.curlybrace.open.cs - - - end - \} - endCaptures - - 0 - - name - punctuation.curlybrace.close.cs - - - patterns - - - include - #interface-members - - - - - - struct-declaration - - begin - (?=(?:((new|public|protected|internal|private|partial)\s+)*)(?:struct)\s+) - end - (?<=\}) - patterns - - - include - #comment - - - name - storage.modifier.cs - match - \b(new|public|protected|internal|private|partial)\b - - - begin - (?=struct) - end - (?=\{) - patterns - - - match - (?x) -(struct)\s+ -( - (?<identifier>[_$[:alpha:]][_$[:alnum:]]*) - (\s*<\s*(?:\g<identifier>\s*,\s*)*\g<identifier>\s*>)? -) - captures - - 1 - - name - keyword.other.struct.cs - - 2 - - name - entity.name.type.struct.cs - - - - - include - #generic-constraints - - - include - #base-types - - - - - begin - \{ - beginCaptures - - 0 - - name - punctuation.curlybrace.open.cs - - - end - \} - endCaptures - - 0 - - name - punctuation.curlybrace.close.cs - - - patterns - - - include - #struct-members - - - - - - base-types - - begin - : - beginCaptures - - 0 - - name - punctuation.separator.colon.cs - - - end - (?=\{|where) - patterns - - - include - #type - - - include - #punctuation-comma - - - - generic-constraints - - begin - (where)\s+([_$[:alpha:]][_$[:alnum:]]*)\s*(:) - beginCaptures - - 1 - - name - keyword.other.where.cs - - 2 - - name - storage.type.cs - - 3 - - name - punctuation.separator.colon.cs - - - end - (?=\{|where|;) - patterns - - - name - keyword.other.class.cs - match - \bclass\b - - - name - keyword.other.struct.cs - match - \bstruct\b - - - match - (new)\s*(\()\s*(\)) - captures - - 1 - - name - 
keyword.other.new.cs - - 2 - - name - punctuation.parenthesis.open.cs - - 3 - - name - punctuation.parenthesis.close.cs - - - - - include - #type - - - include - #punctuation-comma - - - include - #generic-constraints - - - - field-declaration - - begin - (?x) -(?<storage-modifiers>(?:\b(?:new|public|protected|internal|private|static|readonly|volatile|const)\b\s+)*)\s* -(?<type-name>(?: - (?:(?<identifier>[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? - (?: - (?:\g<identifier>(?:\s*\.\s*\g<identifier>)*) - (?:\s*<\s*(?:\g<type-name>)(?:\s*,\s*\g<type-name>)*\s*>\s*)? - (?:(?:\*)*)? - (?:(?:\[,*\])*)? - (?:\s*\.\s*\g<type-name>)* - )| - (?:\s*\(\s*(?:\g<type-name>)(?:\s+\g<identifier>)?(?:\s*,\s*(?:\g<type-name>)(?:\s+\g<identifier>)?)*\s*\)\s*)) - (?:(?:\[,*\])*)? -)\s+ -(?<first-field-name>\g<identifier>)\s* -(?!=>|==)(?=,|;|=) - beginCaptures - - 1 - - patterns - - - match - \b(new|public|protected|internal|private|static|readonly|volatile|const)\b - captures - - 1 - - name - storage.modifier.cs - - - - - - 2 - - patterns - - - include - #type - - - - 3 - - name - entity.name.variable.field.cs - - - end - (?=;) - patterns - - - name - entity.name.variable.field.cs - match - [_$[:alpha:]][_$[:alnum:]]* - - - include - #punctuation-comma - - - include - #comment - - - include - #variable-initializer - - - - property-declaration - - begin - (?x) -(?!.*\b(?:class|interface|struct|enum|event)\b) -(?<storage-modifiers>(?:\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\b\s*)*)\s* -(?<return-type> - (?<type-name>(?: - (?:(?<identifier>[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? - (?: - (?:\g<identifier>(?:\s*\.\s*\g<identifier>)*) - (?:\s*<\s*(?:\g<type-name>)(?:\s*,\s*\g<type-name>)*\s*>\s*)? - (?:(?:\*)*)? - (?:(?:\[,*\])*)? - (?:\s*\.\s*\g<type-name>)* - )| - (?:\s*\(\s*(?:\g<type-name>)(?:\s+\g<identifier>)?(?:\s*,\s*(?:\g<type-name>)(?:\s+\g<identifier>)?)*\s*\)\s*)) - (?:(?:\[,*\])*)? 
- )\s+ -) -(?<interface-name>\g<type-name>\s*\.\s*)? -(?<property-name>\g<identifier>)\s* -(?=\{|=>|$) - beginCaptures - - 1 - - patterns - - - match - \b(new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\b - captures - - 1 - - name - storage.modifier.cs - - - - - - 2 - - patterns - - - include - #type - - - - 5 - - patterns - - - include - #type - - - include - #punctuation-accessor - - - - 6 - - name - entity.name.variable.property.cs - - - end - (?=\}|;) - patterns - - - include - #comment - - - include - #property-accessors - - - include - #expression-body - - - include - #variable-initializer - - - - indexer-declaration - - begin - (?x) -(?<storage-modifiers>(?:\b(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\b\s*)*)\s* -(?<return-type> - (?<type-name>(?: - (?:(?<identifier>[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? - (?: - (?:\g<identifier>(?:\s*\.\s*\g<identifier>)*) - (?:\s*<\s*(?:\g<type-name>)(?:\s*,\s*\g<type-name>)*\s*>\s*)? - (?:(?:\*)*)? - (?:(?:\[,*\])*)? - (?:\s*\.\s*\g<type-name>)* - )| - (?:\s*\(\s*(?:\g<type-name>)(?:\s+\g<identifier>)?(?:\s*,\s*(?:\g<type-name>)(?:\s+\g<identifier>)?)*\s*\)\s*)) - (?:(?:\[,*\])*)? - )\s+ -) -(?<interface-name>\g<type-name>\s*\.\s*)? 
-(?<indexer-name>this)\s* -(?=\[) - beginCaptures - - 1 - - patterns - - - match - \b(new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\b - captures - - 1 - - name - storage.modifier.cs - - - - - - 2 - - patterns - - - include - #type - - - - 5 - - patterns - - - include - #type - - - include - #punctuation-accessor - - - - 6 - - name - keyword.other.this.cs - - - end - (?=\}|;) - patterns - - - include - #comment - - - include - #bracketed-parameter-list - - - include - #property-accessors - - - include - #expression-body - - - include - #variable-initializer - - - - event-declaration - - begin - (?x) -(?<storage-modifiers>(?:\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\b\s*)*)\s* -\b(?<event-keyword>event)\b\s* -(?<return-type> - (?<type-name>(?: - (?:(?<identifier>[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? - (?: - (?:\g<identifier>(?:\s*\.\s*\g<identifier>)*) - (?:\s*<\s*(?:\g<type-name>)(?:\s*,\s*\g<type-name>)*\s*>\s*)? - (?:(?:\*)*)? - (?:(?:\[,*\])*)? - (?:\s*\.\s*\g<type-name>)* - )| - (?:\s*\(\s*(?:\g<type-name>)(?:\s+\g<identifier>)?(?:\s*,\s*(?:\g<type-name>)(?:\s+\g<identifier>)?)*\s*\)\s*)) - (?:(?:\[,*\])*)? - )\s+ -) -(?<interface-name>\g<type-name>\s*\.\s*)? 
-(?<event-names>\g<identifier>(?:\s*,\s*\g<identifier>)*)\s* -(?=\{|;|$) - beginCaptures - - 1 - - patterns - - - match - \b(new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\b - captures - - 1 - - name - storage.modifier.cs - - - - - - 2 - - name - keyword.other.event.cs - - 3 - - patterns - - - include - #type - - - - 6 - - patterns - - - include - #type - - - include - #punctuation-accessor - - - - 7 - - patterns - - - name - entity.name.variable.event.cs - match - [_$[:alpha:]][_$[:alnum:]]* - - - include - #punctuation-comma - - - - - end - (?=\}|;) - patterns - - - include - #comment - - - include - #event-accessors - - - include - #punctuation-comma - - - - property-accessors - - begin - \{ - beginCaptures - - 0 - - name - punctuation.curlybrace.open.cs - - - end - \} - endCaptures - - 0 - - name - punctuation.curlybrace.close.cs - - - patterns - - - name - storage.modifier.cs - match - \b(private|protected|internal)\b - - - name - keyword.other.get.cs - match - \b(get)\b - - - name - keyword.other.set.cs - match - \b(set)\b - - - include - #block - - - include - #punctuation-semicolon - - - - event-accessors - - begin - \{ - beginCaptures - - 0 - - name - punctuation.curlybrace.open.cs - - - end - \} - endCaptures - - 0 - - name - punctuation.curlybrace.close.cs - - - patterns - - - name - keyword.other.add.cs - match - \b(add)\b - - - name - keyword.other.remove.cs - match - \b(remove)\b - - - include - #block - - - include - #punctuation-semicolon - - - - method-declaration - - begin - (?x) -(?<storage-modifiers>(?:\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\b\s*)*)\s* -(?<return-type> - (?<type-name>(?: - (?:(?<identifier>[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? - (?: - (?:\g<identifier>(?:\s*\.\s*\g<identifier>)*) - (?:\s*<\s*(?:\g<type-name>)(?:\s*,\s*\g<type-name>)*\s*>\s*)? - (?:(?:\*)*)? - (?:(?:\[,*\])*)? 
- (?:\s*\.\s*\g<type-name>)* - )| - (?:\s*\(\s*(?:\g<type-name>)(?:\s+\g<identifier>)?(?:\s*,\s*(?:\g<type-name>)(?:\s+\g<identifier>)?)*\s*\)\s*)) - (?:(?:\[,*\])*)? - )\s+ -) -(?<interface-name>\g<type-name>\s*\.\s*)? -(?<method-name>\g<identifier>(?:\s*<\s*\g<identifier>(?:\s*,\s*\g<identifier>)*\s*>\s*)?)\s* -(?=\() - beginCaptures - - 1 - - patterns - - - match - \b(new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\b - captures - - 1 - - name - storage.modifier.cs - - - - - - 2 - - patterns - - - include - #type - - - - 5 - - patterns - - - include - #type - - - include - #punctuation-accessor - - - - 6 - - name - entity.name.function.cs - - - end - (?=\}|;) - patterns - - - include - #comment - - - include - #parenthesized-parameter-list - - - include - #generic-constraints - - - include - #expression-body - - - include - #block - - - - constructor-declaration - - begin - (?x) -(?= - # We're a extra careful here to avoid matching field declarations of the shape 'private (int i) x' - (?: - (?<storage-modifiers>(?:(?:public|protected|internal|private|extern|static)\s+)+)\s* - (?<identifier>[_$[:alpha:]][_$[:alnum:]]*)| - (?:\g<identifier>) - )\s* - (?:\() -) - end - (?=\}|;) - patterns - - - match - (?x) -(?<storage-modifiers>(?:(?:public|protected|internal|private|extern|static)\s+)+)\s* -(?<identifier>[_$[:alpha:]][_$[:alnum:]]*)\s* -(?=\() - captures - - 1 - - patterns - - - match - \b(public|protected|internal|private|extern|static)\b - captures - - 1 - - name - storage.modifier.cs - - - - - - 2 - - name - entity.name.function.cs - - - - - match - ([_$[:alpha:]][_$[:alnum:]]*)\s*(?=\() - captures - - 1 - - name - entity.name.function.cs - - - - - include - #comment - - - include - #parenthesized-parameter-list - - - include - #expression-body - - - include - #constructor-initializer - - - include - #block - - - - constructor-initializer - - begin - (:)\s*\b(?:(base)|(this))\b\s*(?=\() - beginCaptures - - 1 - 
- name - punctuation.separator.colon.cs - - 2 - - name - keyword.other.base.cs - - 3 - - name - keyword.other.this.cs - - - end - (?=\{|;) - patterns - - - include - #argument-list - - - - destructor-declaration - - begin - (~)([_$[:alpha:]][_$[:alnum:]]*)\s*(?=\() - beginCaptures - - 1 - - name - punctuation.tilde.cs - - 2 - - name - entity.name.function.cs - - - end - (?=\}|;) - patterns - - - include - #comment - - - include - #parenthesized-parameter-list - - - include - #expression-body - - - include - #block - - - - operator-declaration - - begin - (?x) -(?<storage-modifiers>(?:(?:public|static|extern)\s+)*)\s* -(?<type-name>(?: - (?:(?<identifier>[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? - (?: - (?:\g<identifier>(?:\s*\.\s*\g<identifier>)*) - (?:\s*<\s*(?:\g<type-name>)(?:\s*,\s*\g<type-name>)*\s*>\s*)? - (?:(?:\*)*)? - (?:(?:\[,*\])*)? - (?:\s*\.\s*\g<type-name>)* - )| - (?:\s*\(\s*(?:\g<type-name>)(?:\s+\g<identifier>)?(?:\s*,\s*(?:\g<type-name>)(?:\s+\g<identifier>)?)*\s*\)\s*)) - (?:(?:\[,*\])*)? -)\s* -(?<operator-keyword>(?:\b(?:operator)))\s* -(?<operator>(?:\+|-|\*|/|%|&|\||\^|\<\<|\>\>|==|!=|\>|\<|\>=|\<=|!|~|\+\+|--|true|false))\s* -(?=\() - beginCaptures - - 1 - - patterns - - - match - \b(public|static|extern)\b - captures - - 1 - - name - storage.modifier.cs - - - - - - 2 - - patterns - - - include - #type - - - - 4 - - name - keyword.other.operator.cs - - 5 - - name - entity.name.function.cs - - - end - (?=\}|;) - patterns - - - include - #comment - - - include - #parenthesized-parameter-list - - - include - #expression-body - - - include - #block - - - - conversion-operator-declaration - - begin - (?x) -(?<storage-modifiers>(?:(?:public|static|extern)\s+)*)\s* -(?<explicit-or-implicit-keyword>(?:\b(?:explicit|implicit)))\s* -(?<operator-keyword>(?:\b(?:operator)))\s* -(?<type-name>(?: - (?:(?<identifier>[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? 
- (?: - (?:\g<identifier>(?:\s*\.\s*\g<identifier>)*) - (?:\s*<\s*(?:\g<type-name>)(?:\s*,\s*\g<type-name>)*\s*>\s*)? - (?:(?:\*)*)? - (?:(?:\[,*\])*)? - (?:\s*\.\s*\g<type-name>)* - )| - (?:\s*\(\s*(?:\g<type-name>)(?:\s+\g<identifier>)?(?:\s*,\s*(?:\g<type-name>)(?:\s+\g<identifier>)?)*\s*\)\s*)) - (?:(?:\[,*\])*)? -)\s* -(?=\() - beginCaptures - - 1 - - patterns - - - match - \b(public|static|extern)\b - captures - - 1 - - name - storage.modifier.cs - - - - - - 2 - - patterns - - - match - \b(explicit)\b - captures - - 1 - - name - keyword.other.explicit.cs - - - - - match - \b(implicit)\b - captures - - 1 - - name - keyword.other.implicit.cs - - - - - - 3 - - name - keyword.other.operator.cs - - 4 - - patterns - - - include - #type - - - - - end - (?=\}|;) - patterns - - - include - #comment - - - include - #parenthesized-parameter-list - - - include - #expression-body - - - include - #block - - - - block - - begin - \{ - beginCaptures - - 0 - - name - punctuation.curlybrace.open.cs - - - end - \} - endCaptures - - 0 - - name - punctuation.curlybrace.close.cs - - - patterns - - - include - #statement - - - - variable-initializer - - begin - (?<!=|!)(=)(?!=|>) - beginCaptures - - 1 - - name - keyword.operator.assignment.cs - - - end - (?=[,\);}]) - patterns - - - include - #expression - - - - expression-body - - begin - => - beginCaptures - - 0 - - name - keyword.operator.arrow.cs - - - end - (?=[,\);}]) - patterns - - - include - #expression - - - - control-statement - - patterns - - - name - keyword.control.loop.cs - match - (?<!\.)\b(break|continue)\b - - - name - keyword.control.conditional.cs - match - (?<!\.)\b(else|if)\b - - - - switch-statement - - begin - (?<!\.)\b(switch)\b\s*(?=\() - beginCaptures - - 1 - - name - keyword.control.switch.cs - - - end - (?=\}) - patterns - - - begin - \( - beginCaptures - - 0 - - name - punctuation.parenthesis.open.cs - - - end - \) - endCaptures - - 0 - - name - punctuation.parenthesis.close.cs - - - patterns - - - 
include - #expression - - - - - begin - \{ - beginCaptures - - 0 - - name - punctuation.curlybrace.open.cs - - - end - \} - endCaptures - - 0 - - name - punctuation.curlybrace.close.cs - - - patterns - - - include - #switch-label - - - include - #statement - - - - - - switch-label - - patterns - - - begin - (?<!\.)\b(case)\b\s+ - beginCaptures - - 1 - - name - keyword.control.case.cs - - - end - : - endCaptures - - 0 - - name - punctuation.separator.colon.cs - - - patterns - - - include - #expression - - - - - match - (?<!\.)\b(default)\b\s*(:) - captures - - 1 - - name - keyword.control.default.cs - - 2 - - name - punctuation.separator.colon.cs - - - - - - goto-statement - - begin - (?<!\.)\b(goto)\b - beginCaptures - - 1 - - name - keyword.control.goto.cs - - - end - (?=;) - patterns - - - begin - \b(case)\b - beginCaptures - - 1 - - name - keyword.control.case.cs - - - end - (?=;) - patterns - - - include - #expression - - - - - match - \b(default)\b - captures - - 1 - - name - keyword.control.default.cs - - - - - name - entity.name.label.cs - match - [_$[:alpha]][_$[:alnum:]]* - - - - return-statement - - begin - (?<!\.)\b(return)\b - beginCaptures - - 1 - - name - keyword.control.flow.cs - - - end - (?=;) - patterns - - - include - #expression - - - - do-statement - - begin - (?<!\.)\b(do)\b - beginCaptures - - 1 - - name - keyword.control.loop.do.cs - - - end - (?=;|}) - patterns - - - include - #statement - - - - while-statement - - begin - (?<!\.)\b(while)\b\s*(?=\() - beginCaptures - - 1 - - name - keyword.control.loop.while.cs - - - end - (?=;|}) - patterns - - - begin - \( - beginCaptures - - 0 - - name - punctuation.parenthesis.open.cs - - - end - \) - endCaptures - - 0 - - name - punctuation.parenthesis.close.cs - - - patterns - - - include - #expression - - - - - include - #statement - - - - for-statement - - begin - (?<!\.)\b(for)\b - beginCaptures - - 1 - - name - keyword.control.loop.for.cs - - - end - (?=\;|}) - patterns - - - begin - \( - 
beginCaptures - - 0 - - name - punctuation.parenthesis.open.cs - - - end - \) - endCaptures - - 0 - - name - punctuation.parenthesis.close.cs - - - patterns - - - include - #local-variable-declaration - - - include - #expression - - - include - #punctuation-comma - - - include - #punctuation-semicolon - - - - - include - #statement - - - - foreach-statement - - begin - (?<!\.)\b(foreach)\b - beginCaptures - - 1 - - name - keyword.control.loop.foreach.cs - - - end - (?=\;|}) - patterns - - - begin - \( - beginCaptures - - 0 - - name - punctuation.parenthesis.open.cs - - - end - \) - endCaptures - - 0 - - name - punctuation.parenthesis.close.cs - - - patterns - - - match - (?x) -(?<type-name>(?: - (?:(?<identifier>[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? - (?: - (?:\g<identifier>(?:\s*\.\s*\g<identifier>)*) - (?:\s*<\s*(?:\g<type-name>)(?:\s*,\s*\g<type-name>)*\s*>\s*)? - (?:(?:\*)*)? - (?:(?:\[,*\])*)? - (?:\s*\.\s*\g<type-name>)* - )| - (?:\s*\(\s*(?:\g<type-name>)(?:\s+\g<identifier>)?(?:\s*,\s*(?:\g<type-name>)(?:\s+\g<identifier>)?)*\s*\)\s*)) - (?:(?:\[,*\])*)? 
-)\s+ -(?<local-name>\g<identifier>)\s+ -\b(?<in-keyword>in)\b - captures - - 1 - - patterns - - - include - #type - - - - 3 - - name - variable.local.cs - - 4 - - name - keyword.control.loop.in.cs - - - - - include - #expression - - - - - include - #statement - - - - try-statement - - begin - (?<!\.)\b(try)\b - beginCaptures - - 1 - - name - keyword.control.try.cs - - - end - (?=\}) - patterns - - - include - #block - - - include - #catch-clause - - - include - #finally-clause - - - - finally-clause - - begin - (?<!\.)\b(finally)\b - beginCaptures - - 1 - - name - keyword.control.try.finally.cs - - - end - (?=\}) - patterns - - - include - #block - - - - catch-clause - - begin - (?<!\.)\b(catch)\b - beginCaptures - - 1 - - name - keyword.control.try.catch.cs - - - end - (?=\}) - patterns - - - begin - \( - beginCaptures - - 0 - - name - punctuation.parenthesis.open.cs - - - end - \) - endCaptures - - 0 - - name - punctuation.parenthesis.close.cs - - - patterns - - - match - (?x) -(?<type-name>(?: - (?:(?<identifier>[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? - (?: - (?:\g<identifier>(?:\s*\.\s*\g<identifier>)*) - (?:\s*<\s*(?:\g<type-name>)(?:\s*,\s*\g<type-name>)*\s*>\s*)? - (?:(?:\*)*)? - (?:(?:\[,*\])*)? - (?:\s*\.\s*\g<type-name>)* - )| - (?:\s*\(\s*(?:\g<type-name>)(?:\s+\g<identifier>)?(?:\s*,\s*(?:\g<type-name>)(?:\s+\g<identifier>)?)*\s*\)\s*)) - (?:(?:\[,*\])*)? -) -(?:\s+\b(?<local-name>\g<identifier>)\b)? 
- captures - - 1 - - patterns - - - include - #type - - - - 3 - - name - variable.local.cs - - - - - - - include - #when-clause - - - include - #block - - - include - #finally-clause - - - - when-clause - - begin - (?<!\.)\b(when)\b\s*(\() - beginCaptures - - 1 - - name - keyword.control.try.when.cs - - 2 - - name - punctuation.parenthesis.open.cs - - - end - \) - endCaptures - - 0 - - name - punctuation.parenthesis.close.cs - - - patterns - - - include - #expression - - - - labeled-statement - - match - ([_$[:alpha:]][_$[:alnum:]]*)\s*(:) - captures - - 1 - - name - entity.name.label.cs - - 2 - - name - punctuation.separator.colon.cs - - - - local-declaration - - patterns - - - include - #local-constant-declaration - - - include - #local-variable-declaration - - - - local-variable-declaration - - begin - (?x) -(?<type-name>(?: - (?:(?<identifier>[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? - (?: - (?:\g<identifier>(?:\s*\.\s*\g<identifier>)*) - (?:\s*<\s*(?:\g<type-name>)(?:\s*,\s*\g<type-name>)*\s*>\s*)? - (?:(?:\*)*)? - (?:(?:\[,*\])*)? - (?:\s*\.\s*\g<type-name>)* - )| - (?:\s*\(\s*(?:\g<type-name>)(?:\s+\g<identifier>)?(?:\s*,\s*(?:\g<type-name>)(?:\s+\g<identifier>)?)*\s*\)\s*)) - (?:(?:\[,*\])*)? -)\s+ -(?<first-local-name>\g<identifier>)\s* -(?=,|;|=) - beginCaptures - - 1 - - patterns - - - include - #type - - - - 2 - - name - variable.local.cs - - - end - (?=;) - patterns - - - name - variable.local.cs - match - [_$[:alpha:]][_$[:alnum:]]* - - - include - #punctuation-comma - - - include - #comment - - - include - #variable-initializer - - - - local-constant-declaration - - begin - (?x) -(?<const-keyword>\b(?:const)\b)\s* -(?<type-name>(?: - (?:(?<identifier>[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? - (?: - (?:\g<identifier>(?:\s*\.\s*\g<identifier>)*) - (?:\s*<\s*(?:\g<type-name>)(?:\s*,\s*\g<type-name>)*\s*>\s*)? - (?:(?:\*)*)? - (?:(?:\[,*\])*)? 
- (?:\s*\.\s*\g<type-name>)* - )| - (?:\s*\(\s*(?:\g<type-name>)(?:\s+\g<identifier>)?(?:\s*,\s*(?:\g<type-name>)(?:\s+\g<identifier>)?)*\s*\)\s*)) - (?:(?:\[,*\])*)? -)\s+ -(?<first-local-name>\g<identifier>)\s* -(?=,|;|=) - beginCaptures - - 1 - - name - storage.modifier.cs - - 2 - - patterns - - - include - #type - - - - 3 - - name - variable.local.cs - - - end - (?=;) - patterns - - - name - variable.local.cs - match - [_$[:alpha:]][_$[:alnum:]]* - - - include - #punctuation-comma - - - include - #comment - - - include - #variable-initializer - - - - interpolated-string - - name - string.quoted.double.cs - begin - \$" - beginCaptures - - 0 - - name - punctuation.definition.string.begin.cs - - - end - (")|((?:[^\\\n])$) - endCaptures - - 1 - - name - punctuation.definition.string.end.cs - - 2 - - name - invalid.illegal.newline.cs - - - patterns - - - include - #string-character-escape - - - include - #interpolation - - - - verbatim-interpolated-string - - name - string.quoted.double.cs - begin - \$@" - beginCaptures - - 0 - - name - punctuation.definition.string.begin.cs - - - end - "(?=[^"]) - endCaptures - - 0 - - name - punctuation.definition.string.end.cs - - - patterns - - - include - #verbatim-string-character-escape - - - include - #interpolation - - - - interpolation - - name - meta.interpolation.cs - begin - (?<=[^\{])((?:\{\{)*)(\{)(?=[^\{]) - beginCaptures - - 1 - - name - string.quoted.double.cs - - 2 - - name - punctuation.definition.interpolation.begin.cs - - - end - \} - endCaptures - - 0 - - name - punctuation.definition.interpolation.end.cs - - - patterns - - - include - #expression - - - - literal - - patterns - - - include - #boolean-literal - - - include - #null-literal - - - include - #numeric-literal - - - include - #string-literal - - - include - #verbatim-string-literal - - - - boolean-literal - - patterns - - - name - constant.language.boolean.true.cs - match - (?<!\.)\btrue\b - - - name - constant.language.boolean.false.cs - match - 
(?<!\.)\bfalse\b - - - - null-literal - - name - constant.language.null.cs - match - (?<!\.)\bnull\b - - numeric-literal - - patterns - - - name - constant.numeric.hex.cs - match - \b0(x|X)[0-9a-fA-F_]+(U|u|L|l|UL|Ul|uL|ul|LU|Lu|lU|lu)?\b - - - name - constant.numeric.binary.cs - match - \b0(b|B)[01_]+(U|u|L|l|UL|Ul|uL|ul|LU|Lu|lU|lu)?\b - - - name - constant.numeric.decimal.cs - match - \b([0-9_]+)?\.[0-9_]+((e|E)[0-9]+)?(F|f|D|d|M|m)?\b - - - name - constant.numeric.decimal.cs - match - \b[0-9_]+(e|E)[0-9_]+(F|f|D|d|M|m)?\b - - - name - constant.numeric.decimal.cs - match - \b[0-9_]+(F|f|D|d|M|m)\b - - - name - constant.numeric.decimal.cs - match - \b[0-9_]+(U|u|L|l|UL|Ul|uL|ul|LU|Lu|lU|lu)?\b - - - - string-literal - - name - string.quoted.double.cs - begin - (?<!@)" - beginCaptures - - 0 - - name - punctuation.definition.string.begin.cs - - - end - (")|((?:[^\\\n])$) - endCaptures - - 1 - - name - punctuation.definition.string.end.cs - - 2 - - name - invalid.illegal.newline.cs - - - patterns - - - include - #string-character-escape - - - - string-character-escape - - name - constant.character.escape.cs - match - \\. 
- - verbatim-string-literal - - name - string.quoted.double.cs - begin - @" - beginCaptures - - 0 - - name - punctuation.definition.string.begin.cs - - - end - "(?=[^"]) - endCaptures - - 0 - - name - punctuation.definition.string.end.cs - - - patterns - - - include - #verbatim-string-character-escape - - - - verbatim-string-character-escape - - name - constant.character.escape.cs - match - "" - - expression-operators - - patterns - - - name - keyword.operator.bitwise.shift.cs - match - <<|>> - - - name - keyword.operator.comparison.cs - match - ==|!= - - - name - keyword.operator.relational.cs - match - <=|>=|<|> - - - name - keyword.operator.logical.cs - match - \!|&&|\|\| - - - name - keyword.operator.bitwise.cs - match - \&|~|\^|\| - - - name - keyword.operator.assignment.cs - match - \= - - - name - keyword.operator.decrement.cs - match - -- - - - name - keyword.operator.increment.cs - match - \+\+ - - - name - keyword.operator.arithmetic.cs - match - %|\*|/|-|\+ - - - - parenthesized-expression - - begin - \( - beginCaptures - - 0 - - name - punctuation.parenthesis.open.cs - - - end - \) - endCaptures - - 0 - - name - punctuation.parenthesis.close.cs - - - patterns - - - include - #expression - - - - identifier - - name - variable.other.readwrite.cs - match - [_$[:alpha:]][_$[:alnum:]]* - - object-creation-expression - - begin - (?x) -(new)\s+ -(?<type-name>(?: - (?:(?<identifier>[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? - (?: - (?:\g<identifier>(?:\s*\.\s*\g<identifier>)*) - (?:\s*<\s*(?:\g<type-name>)(?:\s*,\s*\g<type-name>)*\s*>\s*)? - (?:(?:\*)*)? - (?:(?:\[,*\])*)? - (?:\s*\.\s*\g<type-name>)* - )| - (?:\s*\(\s*(?:\g<type-name>)(?:\s+\g<identifier>)?(?:\s*,\s*(?:\g<type-name>)(?:\s+\g<identifier>)?)*\s*\)\s*)) - (?:(?:\[,*\])*)? 
-)\s* -(?=\() - beginCaptures - - 1 - - name - keyword.other.new.cs - - 2 - - patterns - - - include - #type - - - - - end - (?<=\)) - patterns - - - include - #argument-list - - - - bracketed-parameter-list - - begin - (?=(\[)) - beginCaptures - - 1 - - name - punctuation.squarebracket.open.cs - - - end - (?=(\])) - endCaptures - - 1 - - name - punctuation.squarebracket.close.cs - - - patterns - - - begin - (?<=\[) - end - (?=\]) - patterns - - - include - #comment - - - include - #attribute-section - - - name - storage.modifier.cs - match - \b(ref|params|out)\b - - - match - \s+([_$[:alpha:]][_$[:alnum:]]*)\s*(?=[,\]]) - captures - - 1 - - name - variable.parameter.cs - - - - - include - #variable-initializer - - - include - #type - - - include - #punctuation-comma - - - - - - parenthesized-parameter-list - - begin - (\() - beginCaptures - - 0 - - name - punctuation.parenthesis.open.cs - - - end - (\)) - endCaptures - - 0 - - name - punctuation.parenthesis.close.cs - - - patterns - - - include - #comment - - - include - #attribute-section - - - name - storage.modifier.cs - match - \b(ref|params|out)\b - - - match - \s+([_$[:alpha:]][_$[:alnum:]]*)\s*(?=[,)]) - captures - - 1 - - name - variable.parameter.cs - - - - - include - #variable-initializer - - - include - #type - - - include - #punctuation-comma - - - - argument-list - - begin - \( - beginCaptures - - 0 - - name - punctuation.parenthesis.open.cs - - - end - \) - endCaptures - - 0 - - name - punctuation.parenthesis.close.cs - - - patterns - - - include - #named-argument - - - include - #argument - - - include - #punctuation-comma - - - - named-argument - - begin - ([_$[:alpha:]][_$[:alnum:]]*)\s*(:) - beginCaptures - - 1 - - name - variable.parameter.cs - - 2 - - name - punctuation.separator.colon.cs - - - end - (?=(,|\))) - patterns - - - include - #expression - - - - argument - - patterns - - - name - storage.modifier.cs - match - \b(ref|out)\b - - - include - #expression - - - - type - - name - 
meta.type.cs - patterns - - - include - #comment - - - include - #tuple-type - - - include - #type-builtin - - - include - #type-name - - - include - #type-parameters - - - include - #type-array-suffix - - - - tuple-type - - patterns - - - begin - \( - beginCaptures - - 0 - - name - punctuation.parenthesis.open.cs - - - end - \) - endCaptures - - 0 - - name - punctuation.parenthesis.close.cs - - - patterns - - - include - #tuple-element - - - include - #punctuation-comma - - - - - - tuple-element - - match - (?x) -(?<type-name>(?: - (?:(?<identifier>[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? - (?: - (?:\g<identifier>(?:\s*\.\s*\g<identifier>)*) - (?:\s*<\s*(?:\g<type-name>)(?:\s*,\s*\g<type-name>)*\s*>\s*)? - (?:(?:\*)*)? - (?:(?:\[,*\])*)? - (?:\s*\.\s*\g<type-name>)* - )| - (?:\s*\(\s*(?:\g<type-name>)(?:\s+\g<identifier>)?(?:\s*,\s*(?:\g<type-name>)(?:\s+\g<identifier>)?)*\s*\)\s*)) - (?:(?:\[,*\])*)? -) -(?:\s+(?<tuple-name>\g<identifier>))? - captures - - 1 - - patterns - - - include - #type - - - - 3 - - name - entity.name.variable.tuple.cs - - - - type-builtin - - match - \b(bool|byte|char|decimal|double|float|int|long|object|sbyte|short|string|uint|ulong|ushort)\b - captures - - 1 - - name - storage.type.cs - - - - type-name - - patterns - - - match - ([_$[:alpha:]][_$[:alnum:]]*)\s*(\:\:) - captures - - 1 - - name - entity.name.type.alias.cs - - 2 - - name - punctuation.separator.coloncolon.cs - - - - - match - ([_$[:alpha:]][_$[:alnum:]]*)\s*(\.) 
- captures - - 1 - - name - storage.type.cs - - 2 - - name - punctuation.accessor.cs - - - - - match - (\.)\s*([_$[:alpha:]][_$[:alnum:]]*) - captures - - 1 - - name - punctuation.accessor.cs - - 2 - - name - storage.type.cs - - - - - name - storage.type.cs - match - [_$[:alpha:]][_$[:alnum:]]* - - - - type-parameters - - name - meta.type.parameters.cs - begin - < - beginCaptures - - 0 - - name - punctuation.definition.typeparameters.begin.cs - - - end - > - endCaptures - - 0 - - name - punctuation.definition.typeparameters.end.cs - - - patterns - - - include - #comment - - - include - #type - - - include - #punctuation-comma - - - - type-array-suffix - - begin - \[ - beginCaptures - - 0 - - name - punctuation.squarebracket.open.cs - - - end - \] - endCaptures - - 0 - - name - punctuation.squarebracket.close.cs - - - patterns - - - include - #punctuation-comma - - - - operator-assignment - - name - keyword.operator.assignment.cs - match - (?<!=|!)(=)(?!=) - - punctuation-comma - - name - punctuation.separator.comma.cs - match - , - - punctuation-semicolon - - name - punctuation.terminator.statement.cs - match - ; - - punctuation-accessor - - name - punctuation.accessor.cs - match - \. 
- - comment - - patterns - - - name - comment.block.cs - begin - /\* - beginCaptures - - 0 - - name - punctuation.definition.comment.cs - - - end - \*/ - endCaptures - - 0 - - name - punctuation.definition.comment.cs - - - - - begin - (^[ \t]+)?(?=//) - beginCaptures - - 1 - - name - punctuation.whitespace.comment.leading.cs - - - end - (?=$) - patterns - - - name - comment.line.double-slash.cs - begin - // - beginCaptures - - 0 - - name - punctuation.definition.comment.cs - - - end - (?=$) - - - - - - - - \ No newline at end of file From d6cf67b4aa564527c8f66c360b349e39df7830a6 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Fri, 6 Jan 2017 15:09:19 -0800 Subject: [PATCH 072/192] Add support for checked/unchecked statements and expressions --- syntaxes/csharp.tmLanguage.yml | 23 ++++++ syntaxes/syntax.md | 3 +- .../syntaxes/checked-unchecked.test.syntax.ts | 73 +++++++++++++++++++ test/syntaxes/utils/tokenize.ts | 2 + 4 files changed, 100 insertions(+), 1 deletion(-) create mode 100644 test/syntaxes/checked-unchecked.test.syntax.ts diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index e67402801c..09231a9274 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -85,6 +85,7 @@ repository: - include: '#goto-statement' - include: '#return-statement' - include: '#try-statement' + - include: '#checked-unchecked-statement' - include: '#control-statement' - include: '#labeled-statement' - include: '#local-declaration' @@ -94,6 +95,7 @@ repository: expression: patterns: + - include: '#checked-unchecked-expression' - include: '#interpolated-string' - include: '#verbatim-interpolated-string' - include: '#literal' @@ -1039,6 +1041,15 @@ repository: patterns: - include: '#expression' + checked-unchecked-statement: + begin: (? 
{ + before(() => should()); + + describe("Checked/Unchecked", () => { + it("checked statement", () => { + const input = Input.InMethod(` +checked +{ +}`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Checked, + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace + ]); + }); + + it("unchecked statement", () => { + const input = Input.InMethod(` +unchecked +{ +}`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Unchecked, + Token.Puncuation.OpenBrace, + Token.Puncuation.CloseBrace + ]); + }); + + it("checked expression", () => { + const input = Input.InMethod(`int x = checked(42);`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("int"), + Token.Variables.Local("x"), + Token.Operators.Assignment, + Token.Keywords.Checked, + Token.Puncuation.OpenParen, + Token.Literals.Numeric.Decimal("42"), + Token.Puncuation.CloseParen, + Token.Puncuation.Semicolon + ]); + }); + + it("unchecked expression", () => { + const input = Input.InMethod(`int x = unchecked(42);`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("int"), + Token.Variables.Local("x"), + Token.Operators.Assignment, + Token.Keywords.Unchecked, + Token.Puncuation.OpenParen, + Token.Literals.Numeric.Decimal("42"), + Token.Puncuation.CloseParen, + Token.Puncuation.Semicolon + ]); + }); + }); +}); \ No newline at end of file diff --git a/test/syntaxes/utils/tokenize.ts b/test/syntaxes/utils/tokenize.ts index 25cdfe7a04..e5c10b840a 100644 --- a/test/syntaxes/utils/tokenize.ts +++ b/test/syntaxes/utils/tokenize.ts @@ -187,6 +187,7 @@ export namespace Token { export const Break = createToken('break', 'keyword.control.loop.cs'); export const Case = createToken('case', 'keyword.control.case.cs'); export const Catch = createToken('catch', 'keyword.control.try.catch.cs'); + export const Checked = createToken('checked', 'keyword.other.checked.cs'); export const Class = 
createToken('class', 'keyword.other.class.cs'); export const Continue = createToken('continue', 'keyword.control.loop.cs'); export const Default = createToken('default', 'keyword.control.default.cs'); @@ -218,6 +219,7 @@ export namespace Token { export const This = createToken('this', 'keyword.other.this.cs'); export const Throw = createToken('throw', 'keyword.control.throw.cs'); export const Try = createToken('try', 'keyword.control.try.cs'); + export const Unchecked = createToken('unchecked', 'keyword.other.unchecked.cs'); export const Using = createToken('using', 'keyword.other.using.cs'); export const When = createToken('when', 'keyword.control.try.when.cs'); export const Where = createToken('where', 'keyword.other.where.cs'); From ef31f1016ef748ab1abcca1cb24dde365f4ccd43 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Fri, 6 Jan 2017 15:15:24 -0800 Subject: [PATCH 073/192] Add support for lock statements --- syntaxes/csharp.tmLanguage.yml | 17 ++++ syntaxes/syntax.md | 1 - test/syntaxes/lock-statements.test.syntax.ts | 99 ++++++++++++++++++++ test/syntaxes/utils/tokenize.ts | 1 + 4 files changed, 117 insertions(+), 1 deletion(-) create mode 100644 test/syntaxes/lock-statements.test.syntax.ts diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index 09231a9274..455bf1a6a5 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -86,6 +86,7 @@ repository: - include: '#return-statement' - include: '#try-statement' - include: '#checked-unchecked-statement' + - include: '#lock-statement' - include: '#control-statement' - include: '#labeled-statement' - include: '#local-declaration' @@ -1050,6 +1051,22 @@ repository: patterns: - include: '#block' + lock-statement: + begin: (? 
{ + before(() => should()); + + describe("Lock statements", () => { + it("single-line lock with embedded statement", () => { + const input = Input.InMethod(`lock (new object()) Do();`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Lock, + Token.Puncuation.OpenParen, + Token.Keywords.New, + Token.Type("object"), + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Puncuation.CloseParen, + Token.Variables.ReadWrite("Do"), + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Puncuation.Semicolon + ]); + }); + + it("single-line lock with block", () => { + const input = Input.InMethod(`lock (new object()) { Do(); }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Lock, + Token.Puncuation.OpenParen, + Token.Keywords.New, + Token.Type("object"), + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Variables.ReadWrite("Do"), + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace + ]); + }); + + it("lock with embedded statement", () => { + const input = Input.InMethod(` +lock (new object()) + Do();`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Lock, + Token.Puncuation.OpenParen, + Token.Keywords.New, + Token.Type("object"), + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Puncuation.CloseParen, + Token.Variables.ReadWrite("Do"), + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Puncuation.Semicolon + ]); + }); + + it("lock with block", () => { + const input = Input.InMethod(` +lock (new object()) +{ + Do(); +}`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Lock, + Token.Puncuation.OpenParen, + Token.Keywords.New, + Token.Type("object"), + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + 
Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Variables.ReadWrite("Do"), + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace + ]); + }); + }); +}); \ No newline at end of file diff --git a/test/syntaxes/utils/tokenize.ts b/test/syntaxes/utils/tokenize.ts index e5c10b840a..8b764516ca 100644 --- a/test/syntaxes/utils/tokenize.ts +++ b/test/syntaxes/utils/tokenize.ts @@ -207,6 +207,7 @@ export namespace Token { export const Implicit = createToken('implicit', 'keyword.other.implicit.cs'); export const In = createToken('in', 'keyword.control.loop.in.cs'); export const Interface = createToken('interface', 'keyword.other.interface.cs'); + export const Lock = createToken('lock', 'keyword.other.lock.cs'); export const Namespace = createToken('namespace', 'keyword.other.namespace.cs'); export const New = createToken('new', 'keyword.other.new.cs'); export const Operator = createToken('operator', 'keyword.other.operator.cs'); From 4c489c3c8a0290eb9c20792d7b5937dd6e674685 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Fri, 6 Jan 2017 15:26:40 -0800 Subject: [PATCH 074/192] Add support for using statements --- syntaxes/csharp.tmLanguage.yml | 22 ++- syntaxes/syntax.md | 1 - test/syntaxes/using-statements.test.syntax.ts | 151 ++++++++++++++++++ 3 files changed, 171 insertions(+), 3 deletions(-) create mode 100644 test/syntaxes/using-statements.test.syntax.ts diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index 455bf1a6a5..58e10db386 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -87,6 +87,7 @@ repository: - include: '#try-statement' - include: '#checked-unchecked-statement' - include: '#lock-statement' + - include: '#using-statement' - include: '#control-statement' - include: '#labeled-statement' - include: '#local-declaration' @@ -1067,6 +1068,23 @@ repository: - include: '#expression' - include: '#statement' + 
using-statement: + begin: (?\g)\s* - (?=,|;|=) + (?=,|;|=|\)) beginCaptures: '1': patterns: - include: '#type' '2': { name: variable.local.cs } - end: (?=;) + end: (?=;|\)) patterns: - name: variable.local.cs match: '[_$[:alpha:]][_$[:alnum:]]*' diff --git a/syntaxes/syntax.md b/syntaxes/syntax.md index 70bcfd9196..7f84e8aa4c 100644 --- a/syntaxes/syntax.md +++ b/syntaxes/syntax.md @@ -9,7 +9,6 @@ * casts * compound assignement * jump statements (break, continue, throw) - * using statements * yield statement * unsafe code * XML doc comments diff --git a/test/syntaxes/using-statements.test.syntax.ts b/test/syntaxes/using-statements.test.syntax.ts new file mode 100644 index 0000000000..ab6b4b9284 --- /dev/null +++ b/test/syntaxes/using-statements.test.syntax.ts @@ -0,0 +1,151 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. 
+ *--------------------------------------------------------------------------------------------*/ + +import { should } from 'chai'; +import { tokenize, Input, Token } from './utils/tokenize'; + +describe("Grammar", () => { + before(() => should()); + + describe("Using statements", () => { + it("single-line using with expression and embedded statement", () => { + const input = Input.InMethod(`using (new object()) Do();`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Using, + Token.Puncuation.OpenParen, + Token.Keywords.New, + Token.Type("object"), + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Puncuation.CloseParen, + Token.Variables.ReadWrite("Do"), + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Puncuation.Semicolon + ]); + }); + + it("single-line using with expression and block", () => { + const input = Input.InMethod(`using (new object()) { Do(); }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Using, + Token.Puncuation.OpenParen, + Token.Keywords.New, + Token.Type("object"), + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Variables.ReadWrite("Do"), + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace + ]); + }); + + it("using with expression and embedded statement", () => { + const input = Input.InMethod(` +using (new object()) + Do();`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Using, + Token.Puncuation.OpenParen, + Token.Keywords.New, + Token.Type("object"), + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Puncuation.CloseParen, + Token.Variables.ReadWrite("Do"), + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Puncuation.Semicolon + ]); + }); + + it("using with expression and block", () => { + const input = 
Input.InMethod(` +using (new object()) +{ + Do(); +}`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Using, + Token.Puncuation.OpenParen, + Token.Keywords.New, + Token.Type("object"), + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Variables.ReadWrite("Do"), + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace + ]); + }); + + it("using with local variable and embedded statement", () => { + const input = Input.InMethod(` +using (var o = new object()) + Do();`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Using, + Token.Puncuation.OpenParen, + Token.Type("var"), + Token.Variables.Local("o"), + Token.Operators.Assignment, + Token.Keywords.New, + Token.Type("object"), + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Puncuation.CloseParen, + Token.Variables.ReadWrite("Do"), + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Puncuation.Semicolon + ]); + }); + + it("using with local variable and block", () => { + const input = Input.InMethod(` +using (var o = new object()) +{ + Do(); +}`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Using, + Token.Puncuation.OpenParen, + Token.Type("var"), + Token.Variables.Local("o"), + Token.Operators.Assignment, + Token.Keywords.New, + Token.Type("object"), + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Variables.ReadWrite("Do"), + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace + ]); + }); + }); +}); \ No newline at end of file From 6ec599a8cdc6e30f6616a35402fede8da00f4cbe Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Fri, 6 Jan 2017 15:47:06 -0800 Subject: [PATCH 075/192] Add 
support for break, continue, throw and yield statements --- syntaxes/csharp.tmLanguage.yml | 41 +++++++++++- syntaxes/syntax.md | 2 - .../iteration-statements.test.syntax.ts | 62 +++++++++++++++++++ .../selection-statements.test.syntax.ts | 51 +++++++++++++++ test/syntaxes/try-statements.test.syntax.ts | 7 +++ test/syntaxes/utils/tokenize.ts | 9 +-- test/syntaxes/yield-statements.test.syntax.ts | 36 +++++++++++ 7 files changed, 199 insertions(+), 9 deletions(-) create mode 100644 test/syntaxes/yield-statements.test.syntax.ts diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index 58e10db386..1749a43a6e 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -84,6 +84,9 @@ repository: - include: '#switch-statement' - include: '#goto-statement' - include: '#return-statement' + - include: '#break-continue-statement' + - include: '#throw-statement' + - include: '#yield-statement' - include: '#try-statement' - include: '#checked-unchecked-statement' - include: '#lock-statement' @@ -824,8 +827,6 @@ repository: control-statement: patterns: - - name: keyword.control.loop.cs - match: (? 
{ ]); }); + it("for loop with break", () => { + + const input = Input.InMethod(` +for (int i = 0; i < 42; i++) +{ + break; +}`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.For, + Token.Puncuation.OpenParen, + Token.Type("int"), + Token.Variables.Local("i"), + Token.Operators.Assignment, + Token.Literals.Numeric.Decimal("0"), + Token.Puncuation.Semicolon, + Token.Variables.ReadWrite("i"), + Token.Operators.Relational.LessThan, + Token.Literals.Numeric.Decimal("42"), + Token.Puncuation.Semicolon, + Token.Variables.ReadWrite("i"), + Token.Operators.Increment, + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Keywords.Break, + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace, + ]); + }); + + it("for loop with continue", () => { + + const input = Input.InMethod(` +for (int i = 0; i < 42; i++) +{ + continue; +}`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.For, + Token.Puncuation.OpenParen, + Token.Type("int"), + Token.Variables.Local("i"), + Token.Operators.Assignment, + Token.Literals.Numeric.Decimal("0"), + Token.Puncuation.Semicolon, + Token.Variables.ReadWrite("i"), + Token.Operators.Relational.LessThan, + Token.Literals.Numeric.Decimal("42"), + Token.Puncuation.Semicolon, + Token.Variables.ReadWrite("i"), + Token.Operators.Increment, + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Keywords.Continue, + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace, + ]); + }); + it("single-line foreach loop", () => { const input = Input.InMethod(`foreach (int i in numbers) { }`); diff --git a/test/syntaxes/selection-statements.test.syntax.ts b/test/syntaxes/selection-statements.test.syntax.ts index 4425be219f..1830ce19ba 100644 --- a/test/syntaxes/selection-statements.test.syntax.ts +++ b/test/syntaxes/selection-statements.test.syntax.ts @@ -249,5 +249,56 @@ default: Token.Puncuation.CloseBrace ]); }); + + it("switch statement with blocks", () => 
{ + const input = Input.InMethod(` +switch (i) { + case 0: + { + goto case 1; + } + case 1: + { + goto default; + } + default: + { + break; + } +}`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Switch, + Token.Puncuation.OpenParen, + Token.Variables.ReadWrite("i"), + Token.Puncuation.CloseParen, + Token.Puncuation.OpenBrace, + Token.Keywords.Case, + Token.Literals.Numeric.Decimal("0"), + Token.Puncuation.Colon, + Token.Puncuation.OpenBrace, + Token.Keywords.Goto, + Token.Keywords.Case, + Token.Literals.Numeric.Decimal("1"), + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace, + Token.Keywords.Case, + Token.Literals.Numeric.Decimal("1"), + Token.Puncuation.Colon, + Token.Puncuation.OpenBrace, + Token.Keywords.Goto, + Token.Keywords.Default, + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace, + Token.Keywords.Default, + Token.Puncuation.Colon, + Token.Puncuation.OpenBrace, + Token.Keywords.Break, + Token.Puncuation.Semicolon, + Token.Puncuation.CloseBrace, + Token.Puncuation.CloseBrace + ]); + }); }); }); \ No newline at end of file diff --git a/test/syntaxes/try-statements.test.syntax.ts b/test/syntaxes/try-statements.test.syntax.ts index 2c36715be1..ab7a6fa298 100644 --- a/test/syntaxes/try-statements.test.syntax.ts +++ b/test/syntaxes/try-statements.test.syntax.ts @@ -127,6 +127,7 @@ catch (Exception ex) const input = Input.InMethod(` try { + throw new Exception(); } catch when (true) { @@ -136,6 +137,12 @@ catch when (true) tokens.should.deep.equal([ Token.Keywords.Try, Token.Puncuation.OpenBrace, + Token.Keywords.Throw, + Token.Keywords.New, + Token.Type("Exception"), + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Puncuation.Semicolon, Token.Puncuation.CloseBrace, Token.Keywords.Catch, Token.Keywords.When, diff --git a/test/syntaxes/utils/tokenize.ts b/test/syntaxes/utils/tokenize.ts index 8b764516ca..634b86b291 100644 --- a/test/syntaxes/utils/tokenize.ts +++ 
b/test/syntaxes/utils/tokenize.ts @@ -184,12 +184,12 @@ export namespace Token { export const Alias = createToken('alias', 'keyword.other.alias.cs'); export const AttributeSpecifier = (text: string) => createToken(text, 'keyword.other.attribute-specifier.cs'); export const Base = createToken('base', 'keyword.other.base.cs'); - export const Break = createToken('break', 'keyword.control.loop.cs'); + export const Break = createToken('break', 'keyword.control.flow.break.cs'); export const Case = createToken('case', 'keyword.control.case.cs'); export const Catch = createToken('catch', 'keyword.control.try.catch.cs'); export const Checked = createToken('checked', 'keyword.other.checked.cs'); export const Class = createToken('class', 'keyword.other.class.cs'); - export const Continue = createToken('continue', 'keyword.control.loop.cs'); + export const Continue = createToken('continue', 'keyword.control.flow.continue.cs'); export const Default = createToken('default', 'keyword.control.default.cs'); export const Delegate = createToken('delegate', 'keyword.other.delegate.cs'); export const Do = createToken('do', 'keyword.control.loop.do.cs'); @@ -212,19 +212,20 @@ export namespace Token { export const New = createToken('new', 'keyword.other.new.cs'); export const Operator = createToken('operator', 'keyword.other.operator.cs'); export const Remove = createToken('remove', 'keyword.other.remove.cs'); - export const Return = createToken('return', 'keyword.control.flow.cs'); + export const Return = createToken('return', 'keyword.control.flow.return.cs'); export const Set = createToken('set', 'keyword.other.set.cs'); export const Static = createToken('static', 'keyword.other.static.cs'); export const Struct = createToken('struct', 'keyword.other.struct.cs'); export const Switch = createToken('switch', 'keyword.control.switch.cs'); export const This = createToken('this', 'keyword.other.this.cs'); - export const Throw = createToken('throw', 'keyword.control.throw.cs'); + export 
const Throw = createToken('throw', 'keyword.control.flow.throw.cs'); export const Try = createToken('try', 'keyword.control.try.cs'); export const Unchecked = createToken('unchecked', 'keyword.other.unchecked.cs'); export const Using = createToken('using', 'keyword.other.using.cs'); export const When = createToken('when', 'keyword.control.try.when.cs'); export const Where = createToken('where', 'keyword.other.where.cs'); export const While = createToken('while', 'keyword.control.loop.while.cs'); + export const Yield = createToken('yield', 'keyword.control.flow.yield.cs'); } export namespace Literals { diff --git a/test/syntaxes/yield-statements.test.syntax.ts b/test/syntaxes/yield-statements.test.syntax.ts new file mode 100644 index 0000000000..bbc5dd1acf --- /dev/null +++ b/test/syntaxes/yield-statements.test.syntax.ts @@ -0,0 +1,36 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. 
+ *--------------------------------------------------------------------------------------------*/ + +import { should } from 'chai'; +import { tokenize, Input, Token } from './utils/tokenize'; + +describe("Grammar", () => { + before(() => should()); + + describe("Yield statements", () => { + it("yield return", () => { + const input = Input.InMethod(`yield return 42;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Yield, + Token.Keywords.Return, + Token.Literals.Numeric.Decimal("42"), + Token.Puncuation.Semicolon + ]); + }); + + it("yield break", () => { + const input = Input.InMethod(`yield break;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Yield, + Token.Keywords.Break, + Token.Puncuation.Semicolon + ]); + }); + }); +}); \ No newline at end of file From 99b8dca4ca512f32cf2f855721a77a312b458400 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Fri, 6 Jan 2017 15:53:56 -0800 Subject: [PATCH 076/192] Factor out if and else statements --- syntaxes/csharp.tmLanguage.yml | 102 ++++++++++++++++++-------------- test/syntaxes/utils/tokenize.ts | 4 +- 2 files changed, 59 insertions(+), 47 deletions(-) diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index 1749a43a6e..7d2f0c624f 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -81,6 +81,8 @@ repository: - include: '#do-statement' - include: '#for-statement' - include: '#foreach-statement' + - include: '#if-statement' + - include: '#else-part' - include: '#switch-statement' - include: '#goto-statement' - include: '#return-statement' @@ -91,7 +93,6 @@ repository: - include: '#checked-unchecked-statement' - include: '#lock-statement' - include: '#using-statement' - - include: '#control-statement' - include: '#labeled-statement' - include: '#local-declaration' - include: '#expression' @@ -825,50 +826,6 @@ repository: patterns: - include: '#expression' - control-statement: - patterns: - - 
name: keyword.control.conditional.cs - match: (? Date: Fri, 6 Jan 2017 16:06:34 -0800 Subject: [PATCH 077/192] Move TODO list into YAML file --- syntaxes/csharp.tmLanguage.yml | 14 ++++++++++++-- syntaxes/syntax.md | 15 --------------- 2 files changed, 12 insertions(+), 17 deletions(-) diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index 7d2f0c624f..c57fdd2b14 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -5,8 +5,18 @@ scopeName: source.cs fileTypes: [cs] uuid: f7de61e2-bdde-4e2a-a139-8221b179584e -# Important patterns: -# Identifier: [_$[:alpha:]][_$[:alnum:]]* +# TODO List +# +# * Refinement and tests to ensure proper highlighting while typing +# * Method calls +# * Element access +# * query expressions +# * lambda expressions and anonymous functions +# * array, collection and object initializers +# * casts +# * compound assignement +# * unsafe code +# * XML doc comments patterns: - include: '#comment' diff --git a/syntaxes/syntax.md b/syntaxes/syntax.md index 35789d216d..e78bb6ebbf 100644 --- a/syntaxes/syntax.md +++ b/syntaxes/syntax.md @@ -1,18 +1,3 @@ -## TODO List: - -* Statements/Expressions: - * Method calls - * Element access - * query expressions - * lambda expressions and anonymous functions - * array, collection and object initializers - * casts - * compound assignement - * unsafe code - * XML doc comments - -* Lots of refinement and tests to ensure proper highlighting while typing - ## Important regular expressions: #### Identifier From 6408bcaf131ecd1fdf250f6400173d2e7039210c Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Fri, 6 Jan 2017 17:53:38 -0800 Subject: [PATCH 078/192] Add support for invocation expressions --- syntaxes/csharp.tmLanguage.yml | 41 +++- test/syntaxes/constructors.test.syntax.ts | 2 +- test/syntaxes/destructors.test.syntax.ts | 2 +- test/syntaxes/indexers.test.syntax.ts | 5 +- .../invocation-expressions.test.syntax.ts | 190 ++++++++++++++++++ 
test/syntaxes/lock-statements.test.syntax.ts | 8 +- .../selection-statements.test.syntax.ts | 24 +-- test/syntaxes/using-statements.test.syntax.ts | 12 +- test/syntaxes/utils/tokenize.ts | 2 + 9 files changed, 259 insertions(+), 27 deletions(-) create mode 100644 test/syntaxes/invocation-expressions.test.syntax.ts diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index c57fdd2b14..ea09fbdc8a 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -8,7 +8,6 @@ uuid: f7de61e2-bdde-4e2a-a139-8221b179584e # TODO List # # * Refinement and tests to ensure proper highlighting while typing -# * Method calls # * Element access # * query expressions # * lambda expressions and anonymous functions @@ -117,6 +116,7 @@ repository: - include: '#literal' - include: '#expression-operators' - include: '#object-creation-expression' + - include: '#invocation-expression' - include: '#parenthesized-expression' - include: '#identifier' @@ -1370,6 +1370,45 @@ repository: name: variable.other.readwrite.cs match: '[_$[:alpha:]][_$[:alnum:]]*' + invocation-expression: + begin: |- + (?x) + (?= + ( # identifier or type name on left-hand side + (?[_$[:alpha:]][_$[:alnum:]]*)\s* + (?<([^<>]|\g)+>)? + \s*\.\s* + )* + (\g)\s* # method name + (\g)? # type parameters + \s*\( # open paren of argument list + ) + end: (?<=\)) + patterns: + - include: '#member-access-expression' + - include: '#punctuation-accessor' + - name: entity.name.function.cs + match: '[_$[:alpha:]][_$[:alnum:]]*' + - include: '#type-parameters' + - include: '#argument-list' + + member-access-expression: + patterns: + - match: (\.)\s*([_$[:alpha:]][_$[:alnum:]]*)(?=\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*) + captures: + '1': { name: puncuation.accessor.cs } + '2': { name: variable.other.object.property.cs } + - match: |- + (?x) + ([_$[:alpha:]][_$[:alnum:]]*) + (?<([^<>]|\g)+>)? 
+ (?=\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*) + captures: + '1': { name: variable.other.object.cs } + '2': + patterns: + - include: '#type-parameters' + object-creation-expression: begin: |- (?x) diff --git a/test/syntaxes/constructors.test.syntax.ts b/test/syntaxes/constructors.test.syntax.ts index f3ca587ea4..d19c97b968 100644 --- a/test/syntaxes/constructors.test.syntax.ts +++ b/test/syntaxes/constructors.test.syntax.ts @@ -59,7 +59,7 @@ TestClass(int x, int y) Token.Variables.Parameter("y"), Token.Puncuation.CloseParen, Token.Operators.Arrow, - Token.Variables.ReadWrite("Foo"), + Token.Identifiers.MethodName("Foo"), Token.Puncuation.OpenParen, Token.Puncuation.CloseParen, Token.Puncuation.Semicolon]); diff --git a/test/syntaxes/destructors.test.syntax.ts b/test/syntaxes/destructors.test.syntax.ts index b1aca85696..4e574776da 100644 --- a/test/syntaxes/destructors.test.syntax.ts +++ b/test/syntaxes/destructors.test.syntax.ts @@ -35,7 +35,7 @@ describe("Grammar", () => { Token.Puncuation.OpenParen, Token.Puncuation.CloseParen, Token.Operators.Arrow, - Token.Variables.ReadWrite("Foo"), + Token.Identifiers.MethodName("Foo"), Token.Puncuation.OpenParen, Token.Puncuation.CloseParen, Token.Puncuation.Semicolon]); diff --git a/test/syntaxes/indexers.test.syntax.ts b/test/syntaxes/indexers.test.syntax.ts index f073400e5d..0886dfa6fa 100644 --- a/test/syntaxes/indexers.test.syntax.ts +++ b/test/syntaxes/indexers.test.syntax.ts @@ -32,8 +32,9 @@ public string this[int index] Token.Keywords.Get, Token.Puncuation.OpenBrace, Token.Keywords.Return, - Token.Variables.ReadWrite("index"), - Token.Variables.ReadWrite("ToString"), + Token.Variables.Object("index"), + Token.Puncuation.Accessor, + Token.Identifiers.MethodName("ToString"), Token.Puncuation.OpenParen, Token.Puncuation.CloseParen, Token.Puncuation.Semicolon, diff --git a/test/syntaxes/invocation-expressions.test.syntax.ts b/test/syntaxes/invocation-expressions.test.syntax.ts new file mode 100644 index 
0000000000..44b4262cdf --- /dev/null +++ b/test/syntaxes/invocation-expressions.test.syntax.ts @@ -0,0 +1,190 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. + *--------------------------------------------------------------------------------------------*/ + +import { should } from 'chai'; +import { tokenize, Input, Token } from './utils/tokenize'; + +describe("Grammar", () => { + before(() => should()); + + describe("Invocation expressions", () => { + it("no arguments", () => { + const input = Input.InMethod(`M();`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("M"), + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Puncuation.Semicolon + ]); + }); + + it("one argument", () => { + const input = Input.InMethod(`M(42);`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("M"), + Token.Puncuation.OpenParen, + Token.Literals.Numeric.Decimal("42"), + Token.Puncuation.CloseParen, + Token.Puncuation.Semicolon + ]); + }); + + it("two arguments", () => { + const input = Input.InMethod(`M(19, 23);`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("M"), + Token.Puncuation.OpenParen, + Token.Literals.Numeric.Decimal("19"), + Token.Puncuation.Comma, + Token.Literals.Numeric.Decimal("23"), + Token.Puncuation.CloseParen, + Token.Puncuation.Semicolon + ]); + }); + + it("two named arguments", () => { + const input = Input.InMethod(`M(x: 19, y: 23);`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("M"), + Token.Puncuation.OpenParen, + Token.Variables.Parameter("x"), + Token.Puncuation.Colon, + Token.Literals.Numeric.Decimal("19"), + Token.Puncuation.Comma, + 
Token.Variables.Parameter("y"), + Token.Puncuation.Colon, + Token.Literals.Numeric.Decimal("23"), + Token.Puncuation.CloseParen, + Token.Puncuation.Semicolon + ]); + }); + + it("ref argument", () => { + const input = Input.InMethod(`M(ref x);`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("M"), + Token.Puncuation.OpenParen, + Token.Keywords.Modifiers.Ref, + Token.Variables.ReadWrite("x"), + Token.Puncuation.CloseParen, + Token.Puncuation.Semicolon + ]); + }); + + it("out argument", () => { + const input = Input.InMethod(`M(out x);`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("M"), + Token.Puncuation.OpenParen, + Token.Keywords.Modifiers.Out, + Token.Variables.ReadWrite("x"), + Token.Puncuation.CloseParen, + Token.Puncuation.Semicolon + ]); + }); + + it("generic with no arguments", () => { + const input = Input.InMethod(`M();`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("M"), + Token.Puncuation.TypeParameters.Begin, + Token.Type("int"), + Token.Puncuation.TypeParameters.End, + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Puncuation.Semicolon + ]); + }); + + it("nested generic with no arguments", () => { + const input = Input.InMethod(`M>();`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("M"), + Token.Puncuation.TypeParameters.Begin, + Token.Type("T"), + Token.Puncuation.TypeParameters.Begin, + Token.Type("int"), + Token.Puncuation.TypeParameters.End, + Token.Puncuation.TypeParameters.End, + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Puncuation.Semicolon + ]); + }); + + it("double-nested generic with no arguments", () => { + const input = Input.InMethod(`M>>();`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("M"), + Token.Puncuation.TypeParameters.Begin, + 
Token.Type("T"), + Token.Puncuation.TypeParameters.Begin, + Token.Type("U"), + Token.Puncuation.TypeParameters.Begin, + Token.Type("int"), + Token.Puncuation.TypeParameters.End, + Token.Puncuation.TypeParameters.End, + Token.Puncuation.TypeParameters.End, + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Puncuation.Semicolon + ]); + }); + + it("member of generic with no arguments", () => { + const input = Input.InMethod(`C.M();`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Variables.Object("C"), + Token.Puncuation.TypeParameters.Begin, + Token.Type("int"), + Token.Puncuation.TypeParameters.End, + Token.Puncuation.Accessor, + Token.Identifiers.MethodName("M"), + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Puncuation.Semicolon + ]); + }); + + it("member of qualified generic with no arguments", () => { + const input = Input.InMethod(`N.C.M();`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Variables.Object("N"), + Token.Puncuation.Accessor, + Token.Variables.Object("C"), + Token.Puncuation.TypeParameters.Begin, + Token.Type("int"), + Token.Puncuation.TypeParameters.End, + Token.Puncuation.Accessor, + Token.Identifiers.MethodName("M"), + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Puncuation.Semicolon + ]); + }); + }); +}); \ No newline at end of file diff --git a/test/syntaxes/lock-statements.test.syntax.ts b/test/syntaxes/lock-statements.test.syntax.ts index a2ac7ebff1..e48e353e90 100644 --- a/test/syntaxes/lock-statements.test.syntax.ts +++ b/test/syntaxes/lock-statements.test.syntax.ts @@ -22,7 +22,7 @@ describe("Grammar", () => { Token.Puncuation.OpenParen, Token.Puncuation.CloseParen, Token.Puncuation.CloseParen, - Token.Variables.ReadWrite("Do"), + Token.Identifiers.MethodName("Do"), Token.Puncuation.OpenParen, Token.Puncuation.CloseParen, Token.Puncuation.Semicolon @@ -42,7 +42,7 @@ describe("Grammar", () => { Token.Puncuation.CloseParen, 
Token.Puncuation.CloseParen, Token.Puncuation.OpenBrace, - Token.Variables.ReadWrite("Do"), + Token.Identifiers.MethodName("Do"), Token.Puncuation.OpenParen, Token.Puncuation.CloseParen, Token.Puncuation.Semicolon, @@ -64,7 +64,7 @@ lock (new object()) Token.Puncuation.OpenParen, Token.Puncuation.CloseParen, Token.Puncuation.CloseParen, - Token.Variables.ReadWrite("Do"), + Token.Identifiers.MethodName("Do"), Token.Puncuation.OpenParen, Token.Puncuation.CloseParen, Token.Puncuation.Semicolon @@ -88,7 +88,7 @@ lock (new object()) Token.Puncuation.CloseParen, Token.Puncuation.CloseParen, Token.Puncuation.OpenBrace, - Token.Variables.ReadWrite("Do"), + Token.Identifiers.MethodName("Do"), Token.Puncuation.OpenParen, Token.Puncuation.CloseParen, Token.Puncuation.Semicolon, diff --git a/test/syntaxes/selection-statements.test.syntax.ts b/test/syntaxes/selection-statements.test.syntax.ts index 1830ce19ba..8397d39cca 100644 --- a/test/syntaxes/selection-statements.test.syntax.ts +++ b/test/syntaxes/selection-statements.test.syntax.ts @@ -19,7 +19,7 @@ describe("Grammar", () => { Token.Puncuation.OpenParen, Token.Literals.Boolean.True, Token.Puncuation.CloseParen, - Token.Variables.ReadWrite("Do"), + Token.Identifiers.MethodName("Do"), Token.Puncuation.OpenParen, Token.Puncuation.CloseParen, Token.Puncuation.Semicolon @@ -36,7 +36,7 @@ describe("Grammar", () => { Token.Literals.Boolean.True, Token.Puncuation.CloseParen, Token.Puncuation.OpenBrace, - Token.Variables.ReadWrite("Do"), + Token.Identifiers.MethodName("Do"), Token.Puncuation.OpenParen, Token.Puncuation.CloseParen, Token.Puncuation.Semicolon, @@ -56,7 +56,7 @@ if (true) Token.Puncuation.OpenParen, Token.Literals.Boolean.True, Token.Puncuation.CloseParen, - Token.Variables.ReadWrite("Do"), + Token.Identifiers.MethodName("Do"), Token.Puncuation.OpenParen, Token.Puncuation.CloseParen, Token.Puncuation.Semicolon @@ -77,7 +77,7 @@ if (true) Token.Literals.Boolean.True, Token.Puncuation.CloseParen, 
Token.Puncuation.OpenBrace, - Token.Variables.ReadWrite("Do"), + Token.Identifiers.MethodName("Do"), Token.Puncuation.OpenParen, Token.Puncuation.CloseParen, Token.Puncuation.Semicolon, @@ -99,12 +99,12 @@ else Token.Puncuation.OpenParen, Token.Literals.Boolean.True, Token.Puncuation.CloseParen, - Token.Variables.ReadWrite("Do"), + Token.Identifiers.MethodName("Do"), Token.Puncuation.OpenParen, Token.Puncuation.CloseParen, Token.Puncuation.Semicolon, Token.Keywords.Else, - Token.Variables.ReadWrite("Dont"), + Token.Identifiers.MethodName("Dont"), Token.Puncuation.OpenParen, Token.Puncuation.CloseParen, Token.Puncuation.Semicolon @@ -129,14 +129,14 @@ else Token.Literals.Boolean.True, Token.Puncuation.CloseParen, Token.Puncuation.OpenBrace, - Token.Variables.ReadWrite("Do"), + Token.Identifiers.MethodName("Do"), Token.Puncuation.OpenParen, Token.Puncuation.CloseParen, Token.Puncuation.Semicolon, Token.Puncuation.CloseBrace, Token.Keywords.Else, Token.Puncuation.OpenBrace, - Token.Variables.ReadWrite("Dont"), + Token.Identifiers.MethodName("Dont"), Token.Puncuation.OpenParen, Token.Puncuation.CloseParen, Token.Puncuation.Semicolon, @@ -158,7 +158,7 @@ else if (false) Token.Puncuation.OpenParen, Token.Literals.Boolean.True, Token.Puncuation.CloseParen, - Token.Variables.ReadWrite("Do"), + Token.Identifiers.MethodName("Do"), Token.Puncuation.OpenParen, Token.Puncuation.CloseParen, Token.Puncuation.Semicolon, @@ -167,7 +167,7 @@ else if (false) Token.Puncuation.OpenParen, Token.Literals.Boolean.False, Token.Puncuation.CloseParen, - Token.Variables.ReadWrite("Dont"), + Token.Identifiers.MethodName("Dont"), Token.Puncuation.OpenParen, Token.Puncuation.CloseParen, Token.Puncuation.Semicolon @@ -192,7 +192,7 @@ else if (false) Token.Literals.Boolean.True, Token.Puncuation.CloseParen, Token.Puncuation.OpenBrace, - Token.Variables.ReadWrite("Do"), + Token.Identifiers.MethodName("Do"), Token.Puncuation.OpenParen, Token.Puncuation.CloseParen, Token.Puncuation.Semicolon, @@ 
-203,7 +203,7 @@ else if (false) Token.Literals.Boolean.False, Token.Puncuation.CloseParen, Token.Puncuation.OpenBrace, - Token.Variables.ReadWrite("Dont"), + Token.Identifiers.MethodName("Dont"), Token.Puncuation.OpenParen, Token.Puncuation.CloseParen, Token.Puncuation.Semicolon, diff --git a/test/syntaxes/using-statements.test.syntax.ts b/test/syntaxes/using-statements.test.syntax.ts index ab6b4b9284..6897666fc3 100644 --- a/test/syntaxes/using-statements.test.syntax.ts +++ b/test/syntaxes/using-statements.test.syntax.ts @@ -22,7 +22,7 @@ describe("Grammar", () => { Token.Puncuation.OpenParen, Token.Puncuation.CloseParen, Token.Puncuation.CloseParen, - Token.Variables.ReadWrite("Do"), + Token.Identifiers.MethodName("Do"), Token.Puncuation.OpenParen, Token.Puncuation.CloseParen, Token.Puncuation.Semicolon @@ -42,7 +42,7 @@ describe("Grammar", () => { Token.Puncuation.CloseParen, Token.Puncuation.CloseParen, Token.Puncuation.OpenBrace, - Token.Variables.ReadWrite("Do"), + Token.Identifiers.MethodName("Do"), Token.Puncuation.OpenParen, Token.Puncuation.CloseParen, Token.Puncuation.Semicolon, @@ -64,7 +64,7 @@ using (new object()) Token.Puncuation.OpenParen, Token.Puncuation.CloseParen, Token.Puncuation.CloseParen, - Token.Variables.ReadWrite("Do"), + Token.Identifiers.MethodName("Do"), Token.Puncuation.OpenParen, Token.Puncuation.CloseParen, Token.Puncuation.Semicolon @@ -88,7 +88,7 @@ using (new object()) Token.Puncuation.CloseParen, Token.Puncuation.CloseParen, Token.Puncuation.OpenBrace, - Token.Variables.ReadWrite("Do"), + Token.Identifiers.MethodName("Do"), Token.Puncuation.OpenParen, Token.Puncuation.CloseParen, Token.Puncuation.Semicolon, @@ -113,7 +113,7 @@ using (var o = new object()) Token.Puncuation.OpenParen, Token.Puncuation.CloseParen, Token.Puncuation.CloseParen, - Token.Variables.ReadWrite("Do"), + Token.Identifiers.MethodName("Do"), Token.Puncuation.OpenParen, Token.Puncuation.CloseParen, Token.Puncuation.Semicolon @@ -140,7 +140,7 @@ using (var o = 
new object()) Token.Puncuation.CloseParen, Token.Puncuation.CloseParen, Token.Puncuation.OpenBrace, - Token.Variables.ReadWrite("Do"), + Token.Identifiers.MethodName("Do"), Token.Puncuation.OpenParen, Token.Puncuation.CloseParen, Token.Puncuation.Semicolon, diff --git a/test/syntaxes/utils/tokenize.ts b/test/syntaxes/utils/tokenize.ts index 085cd313a0..6ec4789e9e 100644 --- a/test/syntaxes/utils/tokenize.ts +++ b/test/syntaxes/utils/tokenize.ts @@ -331,6 +331,8 @@ export namespace Token { export const Alias = (text: string) => createToken(text, 'variable.other.alias.cs'); export const EnumMember = (text: string) => createToken(text, 'variable.other.enummember.cs'); export const Local = (text: string) => createToken(text, 'variable.local.cs'); + export const Object = (text: string) => createToken(text, 'variable.other.object.cs'); + export const Property = (text: string) => createToken(text, 'variable.other.object.property.cs'); export const Parameter = (text: string) => createToken(text, 'variable.parameter.cs'); export const ReadWrite = (text: string) => createToken(text, 'variable.other.readwrite.cs'); export const Tuple = (text: string) => createToken(text, 'entity.name.variable.tuple.cs'); From df9ced12346ec7db1fbdb1bc040891ca0a263767 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Fri, 6 Jan 2017 18:19:24 -0800 Subject: [PATCH 079/192] Add support for element access expressions --- syntaxes/csharp.tmLanguage.yml | 42 ++++- .../element-access-expressions.test.syntax.ts | 160 ++++++++++++++++++ .../invocation-expressions.test.syntax.ts | 44 +++++ 3 files changed, 241 insertions(+), 5 deletions(-) create mode 100644 test/syntaxes/element-access-expressions.test.syntax.ts diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index ea09fbdc8a..99f2ecf82b 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -8,7 +8,6 @@ uuid: f7de61e2-bdde-4e2a-a139-8221b179584e # TODO List # # * Refinement and tests to 
ensure proper highlighting while typing -# * Element access # * query expressions # * lambda expressions and anonymous functions # * array, collection and object initializers @@ -117,6 +116,7 @@ repository: - include: '#expression-operators' - include: '#object-creation-expression' - include: '#invocation-expression' + - include: '#element-access-expression' - include: '#parenthesized-expression' - include: '#identifier' @@ -1379,9 +1379,9 @@ repository: (?<([^<>]|\g)+>)? \s*\.\s* )* - (\g)\s* # method name - (\g)? # type parameters - \s*\( # open paren of argument list + (\g)\s* # method name + (\g)?\s* # type parameters + \( # open paren of argument list ) end: (?<=\)) patterns: @@ -1392,6 +1392,26 @@ repository: - include: '#type-parameters' - include: '#argument-list' + element-access-expression: + begin: |- + (?x) + (?= + ( # identifier or type name on left-hand side + (?[_$[:alpha:]][_$[:alnum:]]*)\s* + (?<([^<>]|\g)+>)? + \s*\.\s* + )* + (\g)\s* # property name + \[ # open square bracket of argument list + ) + end: (?<=\]) + patterns: + - include: '#member-access-expression' + - include: '#punctuation-accessor' + - name: variable.other.object.property.cs + match: '[_$[:alpha:]][_$[:alnum:]]*' + - include: '#bracketed-argument-list' + member-access-expression: patterns: - match: (\.)\s*([_$[:alpha:]][_$[:alnum:]]*)(?=\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*) @@ -1491,12 +1511,24 @@ repository: - include: '#argument' - include: '#punctuation-comma' + bracketed-argument-list: + begin: \[ + beginCaptures: + '0': { name: punctuation.squarebracket.open.cs } + end: \] + endCaptures: + '0': { name: punctuation.squarebracket.close.cs } + patterns: + - include: '#named-argument' + - include: '#argument' + - include: '#punctuation-comma' + named-argument: begin: ([_$[:alpha:]][_$[:alnum:]]*)\s*(:) beginCaptures: '1': { name: variable.parameter.cs } '2': { name: punctuation.separator.colon.cs } - end: (?=(,|\))) + end: (?=(,|\)|\])) patterns: - include: '#expression' diff 
--git a/test/syntaxes/element-access-expressions.test.syntax.ts b/test/syntaxes/element-access-expressions.test.syntax.ts new file mode 100644 index 0000000000..f1572dd05c --- /dev/null +++ b/test/syntaxes/element-access-expressions.test.syntax.ts @@ -0,0 +1,160 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. + *--------------------------------------------------------------------------------------------*/ + +import { should } from 'chai'; +import { tokenize, Input, Token } from './utils/tokenize'; + +describe("Grammar", () => { + before(() => should()); + + describe("Element access expressions", () => { + it("no arguments", () => { + const input = Input.InMethod(`var o = P[];`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("var"), + Token.Variables.Local("o"), + Token.Operators.Assignment, + Token.Variables.Property("P"), + Token.Puncuation.OpenBracket, + Token.Puncuation.CloseBracket, + Token.Puncuation.Semicolon + ]); + }); + + it("one argument", () => { + const input = Input.InMethod(`var o = P[42];`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("var"), + Token.Variables.Local("o"), + Token.Operators.Assignment, + Token.Variables.Property("P"), + Token.Puncuation.OpenBracket, + Token.Literals.Numeric.Decimal("42"), + Token.Puncuation.CloseBracket, + Token.Puncuation.Semicolon + ]); + }); + + it("two arguments", () => { + const input = Input.InMethod(`var o = P[19, 23];`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("var"), + Token.Variables.Local("o"), + Token.Operators.Assignment, + Token.Variables.Property("P"), + Token.Puncuation.OpenBracket, + Token.Literals.Numeric.Decimal("19"), + Token.Puncuation.Comma, + Token.Literals.Numeric.Decimal("23"), + 
Token.Puncuation.CloseBracket, + Token.Puncuation.Semicolon + ]); + }); + + it("two named arguments", () => { + const input = Input.InMethod(`var o = P[x: 19, y: 23];`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("var"), + Token.Variables.Local("o"), + Token.Operators.Assignment, + Token.Variables.Property("P"), + Token.Puncuation.OpenBracket, + Token.Variables.Parameter("x"), + Token.Puncuation.Colon, + Token.Literals.Numeric.Decimal("19"), + Token.Puncuation.Comma, + Token.Variables.Parameter("y"), + Token.Puncuation.Colon, + Token.Literals.Numeric.Decimal("23"), + Token.Puncuation.CloseBracket, + Token.Puncuation.Semicolon + ]); + }); + + it("ref argument", () => { + const input = Input.InMethod(`var o = P[ref x];`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("var"), + Token.Variables.Local("o"), + Token.Operators.Assignment, + Token.Variables.Property("P"), + Token.Puncuation.OpenBracket, + Token.Keywords.Modifiers.Ref, + Token.Variables.ReadWrite("x"), + Token.Puncuation.CloseBracket, + Token.Puncuation.Semicolon + ]); + }); + + it("out argument", () => { + const input = Input.InMethod(`var o = P[out x];`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("var"), + Token.Variables.Local("o"), + Token.Operators.Assignment, + Token.Variables.Property("P"), + Token.Puncuation.OpenBracket, + Token.Keywords.Modifiers.Out, + Token.Variables.ReadWrite("x"), + Token.Puncuation.CloseBracket, + Token.Puncuation.Semicolon + ]); + }); + + it("member of generic with no arguments", () => { + const input = Input.InMethod(`var o = C.P[];`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("var"), + Token.Variables.Local("o"), + Token.Operators.Assignment, + Token.Variables.Object("C"), + Token.Puncuation.TypeParameters.Begin, + Token.Type("int"), + Token.Puncuation.TypeParameters.End, + Token.Puncuation.Accessor, + Token.Variables.Property("P"), + 
Token.Puncuation.OpenBracket, + Token.Puncuation.CloseBracket, + Token.Puncuation.Semicolon + ]); + }); + + it("member of qualified generic with no arguments", () => { + const input = Input.InMethod(`var o = N.C.P[];`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("var"), + Token.Variables.Local("o"), + Token.Operators.Assignment, + Token.Variables.Object("N"), + Token.Puncuation.Accessor, + Token.Variables.Object("C"), + Token.Puncuation.TypeParameters.Begin, + Token.Type("int"), + Token.Puncuation.TypeParameters.End, + Token.Puncuation.Accessor, + Token.Variables.Property("P"), + Token.Puncuation.OpenBracket, + Token.Puncuation.CloseBracket, + Token.Puncuation.Semicolon + ]); + }); + }); +}); \ No newline at end of file diff --git a/test/syntaxes/invocation-expressions.test.syntax.ts b/test/syntaxes/invocation-expressions.test.syntax.ts index 44b4262cdf..8de6d0a0f0 100644 --- a/test/syntaxes/invocation-expressions.test.syntax.ts +++ b/test/syntaxes/invocation-expressions.test.syntax.ts @@ -186,5 +186,49 @@ describe("Grammar", () => { Token.Puncuation.Semicolon ]); }); + + it("store result member of qualified generic with no arguments", () => { + const input = Input.InMethod(`var o = N.C.M();`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("var"), + Token.Variables.Local("o"), + Token.Operators.Assignment, + Token.Variables.Object("N"), + Token.Puncuation.Accessor, + Token.Variables.Object("C"), + Token.Puncuation.TypeParameters.Begin, + Token.Type("int"), + Token.Puncuation.TypeParameters.End, + Token.Puncuation.Accessor, + Token.Identifiers.MethodName("M"), + Token.Puncuation.OpenParen, + Token.Puncuation.CloseParen, + Token.Puncuation.Semicolon + ]); + }); + + it("store result of invocation with two named arguments", () => { + const input = Input.InMethod(`var o = M(x: 19, y: 23);`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("var"), + 
Token.Variables.Local("o"), + Token.Operators.Assignment, + Token.Identifiers.MethodName("M"), + Token.Puncuation.OpenParen, + Token.Variables.Parameter("x"), + Token.Puncuation.Colon, + Token.Literals.Numeric.Decimal("19"), + Token.Puncuation.Comma, + Token.Variables.Parameter("y"), + Token.Puncuation.Colon, + Token.Literals.Numeric.Decimal("23"), + Token.Puncuation.CloseParen, + Token.Puncuation.Semicolon + ]); + }); }); }); \ No newline at end of file From 5573dc5cb20c47e85c6858c0fcbf678335037c76 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Fri, 6 Jan 2017 18:33:29 -0800 Subject: [PATCH 080/192] Add support for cast expressions --- syntaxes/csharp.tmLanguage.yml | 28 ++++++- test/syntaxes/cast-expressions.test.syntax.ts | 83 +++++++++++++++++++ 2 files changed, 110 insertions(+), 1 deletion(-) create mode 100644 test/syntaxes/cast-expressions.test.syntax.ts diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index 99f2ecf82b..f5da446a24 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -11,7 +11,6 @@ uuid: f7de61e2-bdde-4e2a-a139-8221b179584e # * query expressions # * lambda expressions and anonymous functions # * array, collection and object initializers -# * casts # * compound assignement # * unsafe code # * XML doc comments @@ -114,6 +113,7 @@ repository: - include: '#verbatim-interpolated-string' - include: '#literal' - include: '#expression-operators' + - include: '#cast-expression' - include: '#object-creation-expression' - include: '#invocation-expression' - include: '#element-access-expression' @@ -902,6 +902,14 @@ repository: '1': { name: keyword.control.conditional.if.cs } end: (?=;|}) patterns: + - begin: \( + beginCaptures: + '0': { name: punctuation.parenthesis.open.cs } + end: \) + endCaptures: + '0': { name: punctuation.parenthesis.close.cs } + patterns: + - include: '#expression' - include: '#statement' else-part: @@ -910,6 +918,14 @@ repository: '1': { name: 
keyword.control.conditional.else.cs } end: (?=;|}) patterns: + - begin: \( + beginCaptures: + '0': { name: punctuation.parenthesis.open.cs } + end: \) + endCaptures: + '0': { name: punctuation.parenthesis.close.cs } + patterns: + - include: '#expression' - include: '#statement' switch-statement: @@ -1370,6 +1386,16 @@ repository: name: variable.other.readwrite.cs match: '[_$[:alpha:]][_$[:alnum:]]*' + cast-expression: + begin: \( + beginCaptures: + '0': { name: punctuation.parenthesis.open.cs } + end: (\))(?=\s*[_$[:alnum:]\(]) + endCaptures: + '1': { name: punctuation.parenthesis.close.cs } + patterns: + - include: '#type' + invocation-expression: begin: |- (?x) diff --git a/test/syntaxes/cast-expressions.test.syntax.ts b/test/syntaxes/cast-expressions.test.syntax.ts new file mode 100644 index 0000000000..54e7d50cb1 --- /dev/null +++ b/test/syntaxes/cast-expressions.test.syntax.ts @@ -0,0 +1,83 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. 
+ *--------------------------------------------------------------------------------------------*/ + +import { should } from 'chai'; +import { tokenize, Input, Token } from './utils/tokenize'; + +describe("Grammar", () => { + before(() => should()); + + describe("Cast expressions", () => { + it("cast to built-in type in assignment", () => { + const input = Input.InMethod(`var o = (object)42;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("var"), + Token.Variables.Local("o"), + Token.Operators.Assignment, + Token.Puncuation.OpenParen, + Token.Type("object"), + Token.Puncuation.CloseParen, + Token.Literals.Numeric.Decimal("42"), + Token.Puncuation.Semicolon + ]); + }); + + it("cast to generic type in assignment", () => { + const input = Input.InMethod(`var o = (C)42;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("var"), + Token.Variables.Local("o"), + Token.Operators.Assignment, + Token.Puncuation.OpenParen, + Token.Type("C"), + Token.Puncuation.TypeParameters.Begin, + Token.Type("int"), + Token.Puncuation.TypeParameters.End, + Token.Puncuation.CloseParen, + Token.Literals.Numeric.Decimal("42"), + Token.Puncuation.Semicolon + ]); + }); + + it("passed to invocation", () => { + const input = Input.InMethod(`M((int)42);`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("M"), + Token.Puncuation.OpenParen, + Token.Puncuation.OpenParen, + Token.Type("int"), + Token.Puncuation.CloseParen, + Token.Literals.Numeric.Decimal("42"), + Token.Puncuation.CloseParen, + Token.Puncuation.Semicolon + ]); + }); + + it("chained cast passed to invocation", () => { + const input = Input.InMethod(`M((int)(object)42);`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("M"), + Token.Puncuation.OpenParen, + Token.Puncuation.OpenParen, + Token.Type("int"), + Token.Puncuation.CloseParen, + Token.Puncuation.OpenParen, + 
Token.Type("object"), + Token.Puncuation.CloseParen, + Token.Literals.Numeric.Decimal("42"), + Token.Puncuation.CloseParen, + Token.Puncuation.Semicolon + ]); + }); + }); +}); \ No newline at end of file From 2534ff31da4f6e3600717f00d327662f3ced4c55 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Fri, 6 Jan 2017 18:34:50 -0800 Subject: [PATCH 081/192] Fix typo --- test/syntaxes/attributes.test.syntax.ts | 104 +++--- test/syntaxes/boolean-literals.test.syntax.ts | 4 +- test/syntaxes/cast-expressions.test.syntax.ts | 40 +-- .../syntaxes/checked-unchecked.test.syntax.ts | 20 +- test/syntaxes/classes.test.syntax.ts | 144 ++++----- test/syntaxes/constructors.test.syntax.ts | 96 +++--- test/syntaxes/delegates.test.syntax.ts | 34 +- test/syntaxes/destructors.test.syntax.ts | 22 +- .../element-access-expressions.test.syntax.ts | 70 ++-- test/syntaxes/enums.test.syntax.ts | 30 +- test/syntaxes/events.test.syntax.ts | 54 ++-- test/syntaxes/extern-aliases.test.syntax.ts | 4 +- test/syntaxes/fields.test.syntax.ts | 84 ++--- test/syntaxes/indexers.test.syntax.ts | 64 ++-- test/syntaxes/interfaces.test.syntax.ts | 28 +- .../interpolated-strings.test.syntax.ts | 98 +++--- .../invocation-expressions.test.syntax.ts | 138 ++++---- .../iteration-statements.test.syntax.ts | 66 ++-- test/syntaxes/labels.test.syntax.ts | 2 +- test/syntaxes/locals.test.syntax.ts | 18 +- test/syntaxes/lock-statements.test.syntax.ts | 64 ++-- test/syntaxes/methods.test.syntax.ts | 76 ++--- test/syntaxes/namespaces.test.syntax.ts | 44 +-- test/syntaxes/numeric-literals.test.syntax.ts | 8 +- test/syntaxes/operators.test.syntax.ts | 298 +++++++++--------- test/syntaxes/properties.test.syntax.ts | 168 +++++----- .../selection-statements.test.syntax.ts | 188 +++++------ test/syntaxes/string-literals.test.syntax.ts | 34 +- test/syntaxes/structs.test.syntax.ts | 24 +- test/syntaxes/try-statements.test.syntax.ts | 86 ++--- test/syntaxes/type-names.test.syntax.ts | 66 ++-- 
test/syntaxes/using-directives.test.syntax.ts | 66 ++-- test/syntaxes/using-statements.test.syntax.ts | 96 +++--- test/syntaxes/utils/tokenize.ts | 2 +- test/syntaxes/yield-statements.test.syntax.ts | 4 +- 35 files changed, 1172 insertions(+), 1172 deletions(-) diff --git a/test/syntaxes/attributes.test.syntax.ts b/test/syntaxes/attributes.test.syntax.ts index dffd409026..f01e0da31a 100644 --- a/test/syntaxes/attributes.test.syntax.ts +++ b/test/syntaxes/attributes.test.syntax.ts @@ -16,9 +16,9 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Puncuation.OpenBracket, + Token.Punctuation.OpenBracket, Token.Type("Foo"), - Token.Puncuation.CloseBracket]); + Token.Punctuation.CloseBracket]); }); it("global attribute with specifier", () => { @@ -27,11 +27,11 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Puncuation.OpenBracket, + Token.Punctuation.OpenBracket, Token.Keywords.AttributeSpecifier("assembly"), - Token.Puncuation.Colon, + Token.Punctuation.Colon, Token.Type("Foo"), - Token.Puncuation.CloseBracket]); + Token.Punctuation.CloseBracket]); }); it("Two global attributes in same section with specifier", () => { @@ -40,13 +40,13 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Puncuation.OpenBracket, + Token.Punctuation.OpenBracket, Token.Keywords.AttributeSpecifier("module"), - Token.Puncuation.Colon, + Token.Punctuation.Colon, Token.Type("Foo"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Type("Bar"), - Token.Puncuation.CloseBracket]); + Token.Punctuation.CloseBracket]); }); it("Two global attributes in same section with specifier and empty argument lists", () => { @@ -55,17 +55,17 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Puncuation.OpenBracket, + Token.Punctuation.OpenBracket, Token.Keywords.AttributeSpecifier("module"), - Token.Puncuation.Colon, 
+ Token.Punctuation.Colon, Token.Type("Foo"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.Comma, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Comma, Token.Type("Bar"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.CloseBracket]); + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.CloseBracket]); }); it("Global attribute with one argument", () => { @@ -74,12 +74,12 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Puncuation.OpenBracket, + Token.Punctuation.OpenBracket, Token.Type("Foo"), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Literals.Boolean.True, - Token.Puncuation.CloseParen, - Token.Puncuation.CloseBracket]); + Token.Punctuation.CloseParen, + Token.Punctuation.CloseBracket]); }); it("Global attribute with two arguments", () => { @@ -88,14 +88,14 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Puncuation.OpenBracket, + Token.Punctuation.OpenBracket, Token.Type("Foo"), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Literals.Boolean.True, - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Literals.Numeric.Decimal("42"), - Token.Puncuation.CloseParen, - Token.Puncuation.CloseBracket]); + Token.Punctuation.CloseParen, + Token.Punctuation.CloseBracket]); }); it("Global attribute with three arguments", () => { @@ -104,18 +104,18 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Puncuation.OpenBracket, + Token.Punctuation.OpenBracket, Token.Type("Foo"), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Literals.Boolean.True, - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Literals.Numeric.Decimal("42"), - Token.Puncuation.Comma, - Token.Puncuation.String.Begin, + Token.Punctuation.Comma, + 
Token.Punctuation.String.Begin, Token.Literals.String("text"), - Token.Puncuation.String.End, - Token.Puncuation.CloseParen, - Token.Puncuation.CloseBracket]); + Token.Punctuation.String.End, + Token.Punctuation.CloseParen, + Token.Punctuation.CloseBracket]); }); it("Global attribute with named argument", () => { @@ -124,14 +124,14 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Puncuation.OpenBracket, + Token.Punctuation.OpenBracket, Token.Type("Foo"), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Identifiers.PropertyName("Bar"), Token.Operators.Assignment, Token.Literals.Numeric.Decimal("42"), - Token.Puncuation.CloseParen, - Token.Puncuation.CloseBracket]); + Token.Punctuation.CloseParen, + Token.Punctuation.CloseBracket]); }); it("Global attribute with one positional argument and one named argument", () => { @@ -140,16 +140,16 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Puncuation.OpenBracket, + Token.Punctuation.OpenBracket, Token.Type("Foo"), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Literals.Boolean.True, - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Identifiers.PropertyName("Bar"), Token.Operators.Assignment, Token.Literals.Numeric.Decimal("42"), - Token.Puncuation.CloseParen, - Token.Puncuation.CloseBracket]); + Token.Punctuation.CloseParen, + Token.Punctuation.CloseBracket]); }); it("Global attribute with specifier, one positional argument, and two named arguments", () => { @@ -158,24 +158,24 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Puncuation.OpenBracket, + Token.Punctuation.OpenBracket, Token.Keywords.AttributeSpecifier("module"), - Token.Puncuation.Colon, + Token.Punctuation.Colon, Token.Type("Foo"), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Literals.Boolean.True, - Token.Puncuation.Comma, + 
Token.Punctuation.Comma, Token.Identifiers.PropertyName("Bar"), Token.Operators.Assignment, Token.Literals.Numeric.Decimal("42"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Identifiers.PropertyName("Baz"), Token.Operators.Assignment, - Token.Puncuation.String.Begin, + Token.Punctuation.String.Begin, Token.Literals.String("hello"), - Token.Puncuation.String.End, - Token.Puncuation.CloseParen, - Token.Puncuation.CloseBracket]); + Token.Punctuation.String.End, + Token.Punctuation.CloseParen, + Token.Punctuation.CloseBracket]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/boolean-literals.test.syntax.ts b/test/syntaxes/boolean-literals.test.syntax.ts index 0de9433e22..d771dec7d9 100644 --- a/test/syntaxes/boolean-literals.test.syntax.ts +++ b/test/syntaxes/boolean-literals.test.syntax.ts @@ -20,7 +20,7 @@ describe("Grammar", () => { Token.Identifiers.FieldName("x"), Token.Operators.Assignment, Token.Literals.Boolean.True, - Token.Puncuation.Semicolon]); + Token.Punctuation.Semicolon]); }); it("false", () => { @@ -33,7 +33,7 @@ describe("Grammar", () => { Token.Identifiers.FieldName("x"), Token.Operators.Assignment, Token.Literals.Boolean.False, - Token.Puncuation.Semicolon]); + Token.Punctuation.Semicolon]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/cast-expressions.test.syntax.ts b/test/syntaxes/cast-expressions.test.syntax.ts index 54e7d50cb1..2e7c5ca67c 100644 --- a/test/syntaxes/cast-expressions.test.syntax.ts +++ b/test/syntaxes/cast-expressions.test.syntax.ts @@ -18,11 +18,11 @@ describe("Grammar", () => { Token.Type("var"), Token.Variables.Local("o"), Token.Operators.Assignment, - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("object"), - Token.Puncuation.CloseParen, + Token.Punctuation.CloseParen, Token.Literals.Numeric.Decimal("42"), - Token.Puncuation.Semicolon + Token.Punctuation.Semicolon ]); }); @@ -34,14 +34,14 @@ describe("Grammar", () => { Token.Type("var"), 
Token.Variables.Local("o"), Token.Operators.Assignment, - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("C"), - Token.Puncuation.TypeParameters.Begin, + Token.Punctuation.TypeParameters.Begin, Token.Type("int"), - Token.Puncuation.TypeParameters.End, - Token.Puncuation.CloseParen, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.CloseParen, Token.Literals.Numeric.Decimal("42"), - Token.Puncuation.Semicolon + Token.Punctuation.Semicolon ]); }); @@ -51,13 +51,13 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Identifiers.MethodName("M"), - Token.Puncuation.OpenParen, - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("int"), - Token.Puncuation.CloseParen, + Token.Punctuation.CloseParen, Token.Literals.Numeric.Decimal("42"), - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon ]); }); @@ -67,16 +67,16 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Identifiers.MethodName("M"), - Token.Puncuation.OpenParen, - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("int"), - Token.Puncuation.CloseParen, - Token.Puncuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenParen, Token.Type("object"), - Token.Puncuation.CloseParen, + Token.Punctuation.CloseParen, Token.Literals.Numeric.Decimal("42"), - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon ]); }); }); diff --git a/test/syntaxes/checked-unchecked.test.syntax.ts b/test/syntaxes/checked-unchecked.test.syntax.ts index 36c8f777b4..e1cbe42d14 100644 --- a/test/syntaxes/checked-unchecked.test.syntax.ts +++ b/test/syntaxes/checked-unchecked.test.syntax.ts @@ -19,8 +19,8 @@ checked tokens.should.deep.equal([ Token.Keywords.Checked, - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace + 
Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace ]); }); @@ -33,8 +33,8 @@ unchecked tokens.should.deep.equal([ Token.Keywords.Unchecked, - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace ]); }); @@ -47,10 +47,10 @@ unchecked Token.Variables.Local("x"), Token.Operators.Assignment, Token.Keywords.Checked, - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Literals.Numeric.Decimal("42"), - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon ]); }); @@ -63,10 +63,10 @@ unchecked Token.Variables.Local("x"), Token.Operators.Assignment, Token.Keywords.Unchecked, - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Literals.Numeric.Decimal("42"), - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon ]); }); }); diff --git a/test/syntaxes/classes.test.syntax.ts b/test/syntaxes/classes.test.syntax.ts index 3bfa377a77..2933cfe54c 100644 --- a/test/syntaxes/classes.test.syntax.ts +++ b/test/syntaxes/classes.test.syntax.ts @@ -37,58 +37,58 @@ public abstract class PublicAbstractClass { } Token.Keywords.Modifiers.Public, Token.Keywords.Class, Token.Identifiers.ClassName("PublicClass"), - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, Token.Keywords.Class, Token.Identifiers.ClassName("DefaultClass"), - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, Token.Keywords.Modifiers.Internal, Token.Keywords.Class, Token.Identifiers.ClassName("InternalClass"), - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, Token.Keywords.Modifiers.Static, Token.Keywords.Class, Token.Identifiers.ClassName("DefaultStaticClass"), - 
Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, Token.Keywords.Modifiers.Public, Token.Keywords.Modifiers.Static, Token.Keywords.Class, Token.Identifiers.ClassName("PublicStaticClass"), - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, Token.Keywords.Modifiers.Sealed, Token.Keywords.Class, Token.Identifiers.ClassName("DefaultSealedClass"), - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, Token.Keywords.Modifiers.Public, Token.Keywords.Modifiers.Sealed, Token.Keywords.Class, Token.Identifiers.ClassName("PublicSealedClass"), - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, Token.Keywords.Modifiers.Public, Token.Keywords.Modifiers.Abstract, Token.Keywords.Class, Token.Identifiers.ClassName("PublicAbstractClass"), - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, Token.Keywords.Modifiers.Abstract, Token.Keywords.Class, Token.Identifiers.ClassName("DefaultAbstractClass"), - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace]); + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace]); }); it("generics in identifier", () => { @@ -99,8 +99,8 @@ public abstract class PublicAbstractClass { } tokens.should.deep.equal([ Token.Keywords.Class, Token.Identifiers.ClassName("Dictionary"), - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace]); + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace]); }); it("inheritance", () => { @@ -115,59 +115,59 @@ class PublicClass : Dictionary>, IMap"), - Token.Puncuation.Colon, + Token.Punctuation.Colon, Token.Type("Root"), - Token.Puncuation.Accessor, + Token.Punctuation.Accessor, Token.Type("IInterface"), - Token.Puncuation.TypeParameters.Begin, + 
Token.Punctuation.TypeParameters.Begin, Token.Type("Something"), - Token.Puncuation.Accessor, + Token.Punctuation.Accessor, Token.Type("Nested"), - Token.Puncuation.TypeParameters.End, - Token.Puncuation.Comma, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.Comma, Token.Type("Something"), - Token.Puncuation.Accessor, + Token.Punctuation.Accessor, Token.Type("IInterfaceTwo"), - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, Token.Keywords.Class, Token.Identifiers.ClassName("PublicClass"), - Token.Puncuation.Colon, + Token.Punctuation.Colon, Token.Type("Dictionary"), - Token.Puncuation.TypeParameters.Begin, + Token.Punctuation.TypeParameters.Begin, Token.Type("T"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Type("Dictionary"), - Token.Puncuation.TypeParameters.Begin, + Token.Punctuation.TypeParameters.Begin, Token.Type("string"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Type("string"), - Token.Puncuation.TypeParameters.End, - Token.Puncuation.TypeParameters.End, - Token.Puncuation.Comma, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.Comma, Token.Type("IMap"), - Token.Puncuation.TypeParameters.Begin, + Token.Punctuation.TypeParameters.Begin, Token.Type("T"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Type("Dictionary"), - Token.Puncuation.TypeParameters.Begin, + Token.Punctuation.TypeParameters.Begin, Token.Type("string"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Type("string"), - Token.Puncuation.TypeParameters.End, - Token.Puncuation.TypeParameters.End, - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace]); + Token.Punctuation.TypeParameters.End, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace]); }); it("generic constraints", () => { @@ -186,41 +186,41 @@ class PublicClass : Dictionary[]>, 
ISomething Token.Identifiers.ClassName("PublicClass"), Token.Keywords.Where, Token.Type("T"), - Token.Puncuation.Colon, + Token.Punctuation.Colon, Token.Type("ISomething"), - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, Token.Keywords.Class, Token.Identifiers.ClassName("PublicClass"), - Token.Puncuation.Colon, + Token.Punctuation.Colon, Token.Type("Dictionary"), - Token.Puncuation.TypeParameters.Begin, + Token.Punctuation.TypeParameters.Begin, Token.Type("T"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Type("List"), - Token.Puncuation.TypeParameters.Begin, + Token.Punctuation.TypeParameters.Begin, Token.Type("string"), - Token.Puncuation.TypeParameters.End, - Token.Puncuation.OpenBracket, - Token.Puncuation.CloseBracket, - Token.Puncuation.TypeParameters.End, - Token.Puncuation.Comma, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.OpenBracket, + Token.Punctuation.CloseBracket, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.Comma, Token.Type("ISomething"), Token.Keywords.Where, Token.Type("T"), - Token.Puncuation.Colon, + Token.Punctuation.Colon, Token.Type("ICar"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Keywords.New, - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, Token.Keywords.Where, Token.Type("X"), - Token.Puncuation.Colon, + Token.Punctuation.Colon, Token.Keywords.Struct, - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace]); + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace]); }); it("nested class", () => { @@ -238,15 +238,15 @@ class Klass tokens.should.deep.equal([ Token.Keywords.Class, Token.Identifiers.ClassName("Klass"), - Token.Puncuation.OpenBrace, + Token.Punctuation.OpenBrace, Token.Keywords.Modifiers.Public, Token.Keywords.Class, Token.Identifiers.ClassName("Nested"), - Token.Puncuation.OpenBrace, - 
Token.Puncuation.CloseBrace, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, - Token.Puncuation.CloseBrace]); + Token.Punctuation.CloseBrace]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/constructors.test.syntax.ts b/test/syntaxes/constructors.test.syntax.ts index d19c97b968..9a3d89b37b 100644 --- a/test/syntaxes/constructors.test.syntax.ts +++ b/test/syntaxes/constructors.test.syntax.ts @@ -17,10 +17,10 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Identifiers.MethodName("TestClass"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace]); + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace]); }); it("instance constructor with two parameters", () => { @@ -33,15 +33,15 @@ TestClass(int x, int y) tokens.should.deep.equal([ Token.Identifiers.MethodName("TestClass"), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("int"), Token.Variables.Parameter("x"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Type("int"), Token.Variables.Parameter("y"), - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace]); + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace]); }); it("instance constructor with expression body", () => { @@ -51,18 +51,18 @@ TestClass(int x, int y) tokens.should.deep.equal([ Token.Identifiers.MethodName("TestClass"), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("int"), Token.Variables.Parameter("x"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Type("int"), Token.Variables.Parameter("y"), - Token.Puncuation.CloseParen, + Token.Punctuation.CloseParen, Token.Operators.Arrow, Token.Identifiers.MethodName("Foo"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon]); + 
Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon]); }); it("static constructor no parameters", () => { @@ -72,10 +72,10 @@ TestClass(int x, int y) tokens.should.deep.equal([ Token.Identifiers.MethodName("TestClass"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace]); + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace]); }); it("instance constructor with 'this' initializer", () => { @@ -85,15 +85,15 @@ TestClass(int x, int y) tokens.should.deep.equal([ Token.Identifiers.MethodName("TestClass"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.Colon, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Colon, Token.Keywords.This, - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Literals.Numeric.Decimal("42"), - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace]); + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace]); }); it("instance constructor with 'this' initializer with ref parameter", () => { @@ -103,18 +103,18 @@ TestClass(int x, int y) tokens.should.deep.equal([ Token.Identifiers.MethodName("TestClass"), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("int"), Token.Variables.Parameter("x"), - Token.Puncuation.CloseParen, - Token.Puncuation.Colon, + Token.Punctuation.CloseParen, + Token.Punctuation.Colon, Token.Keywords.This, - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Keywords.Modifiers.Ref, Token.Variables.ReadWrite("x"), - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace]); + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace]); }); it("instance constructor with 'this' initializer with 
named parameter", () => { @@ -124,19 +124,19 @@ TestClass(int x, int y) tokens.should.deep.equal([ Token.Identifiers.MethodName("TestClass"), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("int"), Token.Variables.Parameter("x"), - Token.Puncuation.CloseParen, - Token.Puncuation.Colon, + Token.Punctuation.CloseParen, + Token.Punctuation.Colon, Token.Keywords.This, - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Variables.Parameter("y"), - Token.Puncuation.Colon, + Token.Punctuation.Colon, Token.Variables.ReadWrite("x"), - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace]); + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace]); }); it("instance constructor with 'base' initializer", () => { @@ -146,15 +146,15 @@ TestClass(int x, int y) tokens.should.deep.equal([ Token.Identifiers.MethodName("TestClass"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.Colon, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Colon, Token.Keywords.Base, - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Literals.Numeric.Decimal("42"), - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace]); + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/delegates.test.syntax.ts b/test/syntaxes/delegates.test.syntax.ts index c399dcc004..5aacc09600 100644 --- a/test/syntaxes/delegates.test.syntax.ts +++ b/test/syntaxes/delegates.test.syntax.ts @@ -19,9 +19,9 @@ describe("Grammar", () => { Token.Keywords.Delegate, Token.Type("void"), Token.Identifiers.DelegateName("D"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon]); + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + 
Token.Punctuation.Semicolon]); }); it("generic delegate with variance", () => { @@ -33,11 +33,11 @@ describe("Grammar", () => { Token.Keywords.Delegate, Token.Type("TResult"), Token.Identifiers.DelegateName("D"), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("T"), Token.Variables.Parameter("arg1"), - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon]); + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon]); }); it("generic delegate with constraints", () => { @@ -53,13 +53,13 @@ delegate void D() Token.Keywords.Delegate, Token.Type("void"), Token.Identifiers.DelegateName("D"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, Token.Keywords.Where, Token.Type("T1"), - Token.Puncuation.Colon, + Token.Punctuation.Colon, Token.Type("T2"), - Token.Puncuation.Semicolon]); + Token.Punctuation.Semicolon]); }); it("delegate with multiple parameters", () => { @@ -71,22 +71,22 @@ delegate void D() Token.Keywords.Delegate, Token.Type("int"), Token.Identifiers.DelegateName("D"), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Keywords.Modifiers.Ref, Token.Type("string"), Token.Variables.Parameter("x"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Keywords.Modifiers.Out, Token.Type("int"), Token.Variables.Parameter("y"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Keywords.Modifiers.Params, Token.Type("object"), - Token.Puncuation.OpenBracket, - Token.Puncuation.CloseBracket, + Token.Punctuation.OpenBracket, + Token.Punctuation.CloseBracket, Token.Variables.Parameter("z"), - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon]); + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/destructors.test.syntax.ts b/test/syntaxes/destructors.test.syntax.ts index 4e574776da..c8aa7ac748 100644 --- a/test/syntaxes/destructors.test.syntax.ts +++ 
b/test/syntaxes/destructors.test.syntax.ts @@ -16,12 +16,12 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Puncuation.Tilde, + Token.Punctuation.Tilde, Token.Identifiers.MethodName("TestClass"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace]); + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace]); }); it("with expression body", () => { @@ -30,15 +30,15 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Puncuation.Tilde, + Token.Punctuation.Tilde, Token.Identifiers.MethodName("TestClass"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, Token.Operators.Arrow, Token.Identifiers.MethodName("Foo"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon]); + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/element-access-expressions.test.syntax.ts b/test/syntaxes/element-access-expressions.test.syntax.ts index f1572dd05c..6c9c338631 100644 --- a/test/syntaxes/element-access-expressions.test.syntax.ts +++ b/test/syntaxes/element-access-expressions.test.syntax.ts @@ -19,9 +19,9 @@ describe("Grammar", () => { Token.Variables.Local("o"), Token.Operators.Assignment, Token.Variables.Property("P"), - Token.Puncuation.OpenBracket, - Token.Puncuation.CloseBracket, - Token.Puncuation.Semicolon + Token.Punctuation.OpenBracket, + Token.Punctuation.CloseBracket, + Token.Punctuation.Semicolon ]); }); @@ -34,10 +34,10 @@ describe("Grammar", () => { Token.Variables.Local("o"), Token.Operators.Assignment, Token.Variables.Property("P"), - Token.Puncuation.OpenBracket, + Token.Punctuation.OpenBracket, 
Token.Literals.Numeric.Decimal("42"), - Token.Puncuation.CloseBracket, - Token.Puncuation.Semicolon + Token.Punctuation.CloseBracket, + Token.Punctuation.Semicolon ]); }); @@ -50,12 +50,12 @@ describe("Grammar", () => { Token.Variables.Local("o"), Token.Operators.Assignment, Token.Variables.Property("P"), - Token.Puncuation.OpenBracket, + Token.Punctuation.OpenBracket, Token.Literals.Numeric.Decimal("19"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Literals.Numeric.Decimal("23"), - Token.Puncuation.CloseBracket, - Token.Puncuation.Semicolon + Token.Punctuation.CloseBracket, + Token.Punctuation.Semicolon ]); }); @@ -68,16 +68,16 @@ describe("Grammar", () => { Token.Variables.Local("o"), Token.Operators.Assignment, Token.Variables.Property("P"), - Token.Puncuation.OpenBracket, + Token.Punctuation.OpenBracket, Token.Variables.Parameter("x"), - Token.Puncuation.Colon, + Token.Punctuation.Colon, Token.Literals.Numeric.Decimal("19"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Variables.Parameter("y"), - Token.Puncuation.Colon, + Token.Punctuation.Colon, Token.Literals.Numeric.Decimal("23"), - Token.Puncuation.CloseBracket, - Token.Puncuation.Semicolon + Token.Punctuation.CloseBracket, + Token.Punctuation.Semicolon ]); }); @@ -90,11 +90,11 @@ describe("Grammar", () => { Token.Variables.Local("o"), Token.Operators.Assignment, Token.Variables.Property("P"), - Token.Puncuation.OpenBracket, + Token.Punctuation.OpenBracket, Token.Keywords.Modifiers.Ref, Token.Variables.ReadWrite("x"), - Token.Puncuation.CloseBracket, - Token.Puncuation.Semicolon + Token.Punctuation.CloseBracket, + Token.Punctuation.Semicolon ]); }); @@ -107,11 +107,11 @@ describe("Grammar", () => { Token.Variables.Local("o"), Token.Operators.Assignment, Token.Variables.Property("P"), - Token.Puncuation.OpenBracket, + Token.Punctuation.OpenBracket, Token.Keywords.Modifiers.Out, Token.Variables.ReadWrite("x"), - Token.Puncuation.CloseBracket, - Token.Puncuation.Semicolon + 
Token.Punctuation.CloseBracket, + Token.Punctuation.Semicolon ]); }); @@ -124,14 +124,14 @@ describe("Grammar", () => { Token.Variables.Local("o"), Token.Operators.Assignment, Token.Variables.Object("C"), - Token.Puncuation.TypeParameters.Begin, + Token.Punctuation.TypeParameters.Begin, Token.Type("int"), - Token.Puncuation.TypeParameters.End, - Token.Puncuation.Accessor, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.Accessor, Token.Variables.Property("P"), - Token.Puncuation.OpenBracket, - Token.Puncuation.CloseBracket, - Token.Puncuation.Semicolon + Token.Punctuation.OpenBracket, + Token.Punctuation.CloseBracket, + Token.Punctuation.Semicolon ]); }); @@ -144,16 +144,16 @@ describe("Grammar", () => { Token.Variables.Local("o"), Token.Operators.Assignment, Token.Variables.Object("N"), - Token.Puncuation.Accessor, + Token.Punctuation.Accessor, Token.Variables.Object("C"), - Token.Puncuation.TypeParameters.Begin, + Token.Punctuation.TypeParameters.Begin, Token.Type("int"), - Token.Puncuation.TypeParameters.End, - Token.Puncuation.Accessor, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.Accessor, Token.Variables.Property("P"), - Token.Puncuation.OpenBracket, - Token.Puncuation.CloseBracket, - Token.Puncuation.Semicolon + Token.Punctuation.OpenBracket, + Token.Punctuation.CloseBracket, + Token.Punctuation.Semicolon ]); }); }); diff --git a/test/syntaxes/enums.test.syntax.ts b/test/syntaxes/enums.test.syntax.ts index df885b04f8..5696390439 100644 --- a/test/syntaxes/enums.test.syntax.ts +++ b/test/syntaxes/enums.test.syntax.ts @@ -18,8 +18,8 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Enum, Token.Identifiers.EnumName("E"), - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace]); + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace]); }); it("enum with base type", () => { @@ -30,10 +30,10 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Enum, Token.Identifiers.EnumName("E"), - 
Token.Puncuation.Colon, + Token.Punctuation.Colon, Token.Type("byte"), - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace]); + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace]); }); it("enum with single member", () => { @@ -44,9 +44,9 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Enum, Token.Identifiers.EnumName("E"), - Token.Puncuation.OpenBrace, + Token.Punctuation.OpenBrace, Token.Variables.EnumMember("M1"), - Token.Puncuation.CloseBrace]); + Token.Punctuation.CloseBrace]); }); it("enum with multiple members", () => { @@ -57,13 +57,13 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Enum, Token.Identifiers.EnumName("Color"), - Token.Puncuation.OpenBrace, + Token.Punctuation.OpenBrace, Token.Variables.EnumMember("Red"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Variables.EnumMember("Green"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Variables.EnumMember("Blue"), - Token.Puncuation.CloseBrace]); + Token.Punctuation.CloseBrace]); }); it("enum with initialized member", () => { @@ -82,15 +82,15 @@ enum E tokens.should.deep.equal([ Token.Keywords.Enum, Token.Identifiers.EnumName("E"), - Token.Puncuation.OpenBrace, + Token.Punctuation.OpenBrace, Token.Variables.EnumMember("Value1"), Token.Operators.Assignment, Token.Literals.Numeric.Decimal("1"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Variables.EnumMember("Value2"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Variables.EnumMember("Value3"), - Token.Puncuation.CloseBrace]); + Token.Punctuation.CloseBrace]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/events.test.syntax.ts b/test/syntaxes/events.test.syntax.ts index 91938891e7..569a8e66e6 100644 --- a/test/syntaxes/events.test.syntax.ts +++ b/test/syntaxes/events.test.syntax.ts @@ -20,7 +20,7 @@ describe("Grammar", () => { Token.Keywords.Event, Token.Type("Type"), Token.Identifiers.EventName("Event"), - 
Token.Puncuation.Semicolon]); + Token.Punctuation.Semicolon]); }); it("declaration with multiple modifiers", () => { @@ -34,7 +34,7 @@ describe("Grammar", () => { Token.Keywords.Event, Token.Type("Type"), Token.Identifiers.EventName("Event"), - Token.Puncuation.Semicolon]); + Token.Punctuation.Semicolon]); }); it("declaration with multiple declarators", () => { @@ -47,9 +47,9 @@ describe("Grammar", () => { Token.Keywords.Event, Token.Type("Type"), Token.Identifiers.EventName("Event1"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Identifiers.EventName("Event2"), - Token.Puncuation.Semicolon]); + Token.Punctuation.Semicolon]); }); it("generic", () => { @@ -61,21 +61,21 @@ describe("Grammar", () => { Token.Keywords.Modifiers.Public, Token.Keywords.Event, Token.Type("EventHandler"), - Token.Puncuation.TypeParameters.Begin, + Token.Punctuation.TypeParameters.Begin, Token.Type("List"), - Token.Puncuation.TypeParameters.Begin, + Token.Punctuation.TypeParameters.Begin, Token.Type("T"), - Token.Puncuation.TypeParameters.End, - Token.Puncuation.Comma, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.Comma, Token.Type("Dictionary"), - Token.Puncuation.TypeParameters.Begin, + Token.Punctuation.TypeParameters.Begin, Token.Type("T"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Type("D"), - Token.Puncuation.TypeParameters.End, - Token.Puncuation.TypeParameters.End, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.TypeParameters.End, Token.Identifiers.EventName("Event"), - Token.Puncuation.Semicolon]); + Token.Punctuation.Semicolon]); }); it("declaration with accessors", () => { @@ -94,14 +94,14 @@ public event Type Event Token.Keywords.Event, Token.Type("Type"), Token.Identifiers.EventName("Event"), - Token.Puncuation.OpenBrace, + Token.Punctuation.OpenBrace, Token.Keywords.Add, - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, Token.Keywords.Remove, - 
Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace, - Token.Puncuation.CloseBrace]); + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseBrace]); }); it("explicitly-implemented interface member", () => { @@ -113,17 +113,17 @@ public event Type Event Token.Keywords.Event, Token.Type("EventHandler"), Token.Type("IFoo"), - Token.Puncuation.TypeParameters.Begin, + Token.Punctuation.TypeParameters.Begin, Token.Type("string"), - Token.Puncuation.TypeParameters.End, - Token.Puncuation.Accessor, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.Accessor, Token.Identifiers.EventName("Event"), - Token.Puncuation.OpenBrace, + Token.Punctuation.OpenBrace, Token.Keywords.Add, - Token.Puncuation.Semicolon, + Token.Punctuation.Semicolon, Token.Keywords.Remove, - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace]); + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace]); }); it("declaration in interface", () => { @@ -135,7 +135,7 @@ public event Type Event Token.Keywords.Event, Token.Type("EventHandler"), Token.Identifiers.EventName("Event"), - Token.Puncuation.Semicolon]); + Token.Punctuation.Semicolon]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/extern-aliases.test.syntax.ts b/test/syntaxes/extern-aliases.test.syntax.ts index 952ba2653b..beeca3e333 100644 --- a/test/syntaxes/extern-aliases.test.syntax.ts +++ b/test/syntaxes/extern-aliases.test.syntax.ts @@ -22,11 +22,11 @@ extern alias Y;`; Token.Keywords.Extern, Token.Keywords.Alias, Token.Variables.Alias("X"), - Token.Puncuation.Semicolon, + Token.Punctuation.Semicolon, Token.Keywords.Extern, Token.Keywords.Alias, Token.Variables.Alias("Y"), - Token.Puncuation.Semicolon]); + Token.Punctuation.Semicolon]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/fields.test.syntax.ts b/test/syntaxes/fields.test.syntax.ts index 737f810a53..b2e63a2110 100644 --- a/test/syntaxes/fields.test.syntax.ts +++ 
b/test/syntaxes/fields.test.syntax.ts @@ -23,17 +23,17 @@ private List field123;`); Token.Keywords.Modifiers.Private, Token.Type("List"), Token.Identifiers.FieldName("_field"), - Token.Puncuation.Semicolon, + Token.Punctuation.Semicolon, Token.Keywords.Modifiers.Private, Token.Type("List"), Token.Identifiers.FieldName("field"), - Token.Puncuation.Semicolon, + Token.Punctuation.Semicolon, Token.Keywords.Modifiers.Private, Token.Type("List"), Token.Identifiers.FieldName("field123"), - Token.Puncuation.Semicolon]); + Token.Punctuation.Semicolon]); }); it("generic", () => { @@ -44,21 +44,21 @@ private List field123;`); tokens.should.deep.equal([ Token.Keywords.Modifiers.Private, Token.Type("Dictionary"), - Token.Puncuation.TypeParameters.Begin, + Token.Punctuation.TypeParameters.Begin, Token.Type("List"), - Token.Puncuation.TypeParameters.Begin, + Token.Punctuation.TypeParameters.Begin, Token.Type("T"), - Token.Puncuation.TypeParameters.End, - Token.Puncuation.Comma, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.Comma, Token.Type("Dictionary"), - Token.Puncuation.TypeParameters.Begin, + Token.Punctuation.TypeParameters.Begin, Token.Type("T"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Type("D"), - Token.Puncuation.TypeParameters.End, - Token.Puncuation.TypeParameters.End, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.TypeParameters.End, Token.Identifiers.FieldName("_field"), - Token.Puncuation.Semicolon]); + Token.Punctuation.Semicolon]); }); @@ -77,16 +77,16 @@ string _field3;`); Token.Keywords.Modifiers.ReadOnly, Token.Type("List"), Token.Identifiers.FieldName("_field"), - Token.Puncuation.Semicolon, + Token.Punctuation.Semicolon, Token.Keywords.Modifiers.ReadOnly, Token.Type("string"), Token.Identifiers.FieldName("_field2"), - Token.Puncuation.Semicolon, + Token.Punctuation.Semicolon, Token.Type("string"), Token.Identifiers.FieldName("_field3"), - Token.Puncuation.Semicolon]); + Token.Punctuation.Semicolon]); }); 
it("types", () => { @@ -100,13 +100,13 @@ string[] field123;`); tokens.should.deep.equal([ Token.Type("string"), Token.Identifiers.FieldName("field123"), - Token.Puncuation.Semicolon, + Token.Punctuation.Semicolon, Token.Type("string"), - Token.Puncuation.OpenBracket, - Token.Puncuation.CloseBracket, + Token.Punctuation.OpenBracket, + Token.Punctuation.CloseBracket, Token.Identifiers.FieldName("field123"), - Token.Puncuation.Semicolon]); + Token.Punctuation.Semicolon]); }); it("assignment", () => { @@ -122,17 +122,17 @@ const bool field = true;`); Token.Type("string"), Token.Identifiers.FieldName("field"), Token.Operators.Assignment, - Token.Puncuation.String.Begin, + Token.Punctuation.String.Begin, Token.Literals.String("hello"), - Token.Puncuation.String.End, - Token.Puncuation.Semicolon, + Token.Punctuation.String.End, + Token.Punctuation.Semicolon, Token.Keywords.Modifiers.Const, Token.Type("bool"), Token.Identifiers.FieldName("field"), Token.Operators.Assignment, Token.Literals.Boolean.True, - Token.Puncuation.Semicolon]); + Token.Punctuation.Semicolon]); }); it("declaration with multiple declarators", () => { @@ -145,15 +145,15 @@ const bool field = true;`); Token.Identifiers.FieldName("x"), Token.Operators.Assignment, Token.Literals.Numeric.Decimal("19"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Identifiers.FieldName("y"), Token.Operators.Assignment, Token.Literals.Numeric.Decimal("23"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Identifiers.FieldName("z"), Token.Operators.Assignment, Token.Literals.Numeric.Decimal("42"), - Token.Puncuation.Semicolon]); + Token.Punctuation.Semicolon]); }); it("tuple type with no names and no modifiers", () => { @@ -162,13 +162,13 @@ const bool field = true;`); const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("int"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Type("int"), - 
Token.Puncuation.CloseParen, + Token.Punctuation.CloseParen, Token.Identifiers.FieldName("x"), - Token.Puncuation.Semicolon]); + Token.Punctuation.Semicolon]); }); it("tuple type with no names and private modifier", () => { @@ -178,13 +178,13 @@ const bool field = true;`); tokens.should.deep.equal([ Token.Keywords.Modifiers.Private, - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("int"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Type("int"), - Token.Puncuation.CloseParen, + Token.Punctuation.CloseParen, Token.Identifiers.FieldName("x"), - Token.Puncuation.Semicolon]); + Token.Punctuation.Semicolon]); }); it("tuple type with names and no modifiers", () => { @@ -193,15 +193,15 @@ const bool field = true;`); const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("int"), Token.Variables.Tuple("x"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Type("int"), Token.Variables.Tuple("y"), - Token.Puncuation.CloseParen, + Token.Punctuation.CloseParen, Token.Identifiers.FieldName("z"), - Token.Puncuation.Semicolon]); + Token.Punctuation.Semicolon]); }); it("tuple type with names and private modifier", () => { @@ -211,15 +211,15 @@ const bool field = true;`); tokens.should.deep.equal([ Token.Keywords.Modifiers.Private, - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("int"), Token.Variables.Tuple("x"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Type("int"), Token.Variables.Tuple("y"), - Token.Puncuation.CloseParen, + Token.Punctuation.CloseParen, Token.Identifiers.FieldName("z"), - Token.Puncuation.Semicolon]); + Token.Punctuation.Semicolon]); }); }); }); diff --git a/test/syntaxes/indexers.test.syntax.ts b/test/syntaxes/indexers.test.syntax.ts index 0886dfa6fa..a2b7327a3f 100644 --- a/test/syntaxes/indexers.test.syntax.ts +++ b/test/syntaxes/indexers.test.syntax.ts @@ -24,22 +24,22 @@ public string this[int 
index] Token.Keywords.Modifiers.Public, Token.Type("string"), Token.Keywords.This, - Token.Puncuation.OpenBracket, + Token.Punctuation.OpenBracket, Token.Type("int"), Token.Variables.Parameter("index"), - Token.Puncuation.CloseBracket, - Token.Puncuation.OpenBrace, + Token.Punctuation.CloseBracket, + Token.Punctuation.OpenBrace, Token.Keywords.Get, - Token.Puncuation.OpenBrace, + Token.Punctuation.OpenBrace, Token.Keywords.Return, Token.Variables.Object("index"), - Token.Puncuation.Accessor, + Token.Punctuation.Accessor, Token.Identifiers.MethodName("ToString"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace, - Token.Puncuation.CloseBrace]); + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseBrace]); }); it("explicitly-implemented interface member", () => { @@ -50,16 +50,16 @@ public string this[int index] tokens.should.deep.equal([ Token.Type("string"), Token.Type("IFoo"), - Token.Puncuation.TypeParameters.Begin, + Token.Punctuation.TypeParameters.Begin, Token.Type("string"), - Token.Puncuation.TypeParameters.End, - Token.Puncuation.Accessor, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.Accessor, Token.Keywords.This, - Token.Puncuation.OpenBracket, + Token.Punctuation.OpenBracket, Token.Type("int"), Token.Variables.Parameter("index"), - Token.Puncuation.CloseBracket, - Token.Puncuation.Semicolon]); + Token.Punctuation.CloseBracket, + Token.Punctuation.Semicolon]); }); it("declaration in interface", () => { @@ -70,16 +70,16 @@ public string this[int index] tokens.should.deep.equal([ Token.Type("string"), Token.Keywords.This, - Token.Puncuation.OpenBracket, + Token.Punctuation.OpenBracket, Token.Type("int"), Token.Variables.Parameter("index"), - Token.Puncuation.CloseBracket, - Token.Puncuation.OpenBrace, + Token.Punctuation.CloseBracket, + Token.Punctuation.OpenBrace, 
Token.Keywords.Get, - Token.Puncuation.Semicolon, + Token.Punctuation.Semicolon, Token.Keywords.Set, - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace]); + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace]); }); it("declaration in interface (read-only)", () => { @@ -90,14 +90,14 @@ public string this[int index] tokens.should.deep.equal([ Token.Type("string"), Token.Keywords.This, - Token.Puncuation.OpenBracket, + Token.Punctuation.OpenBracket, Token.Type("int"), Token.Variables.Parameter("index"), - Token.Puncuation.CloseBracket, - Token.Puncuation.OpenBrace, + Token.Punctuation.CloseBracket, + Token.Punctuation.OpenBrace, Token.Keywords.Get, - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace]); + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace]); }); it("declaration in interface (write-only)", () => { @@ -108,14 +108,14 @@ public string this[int index] tokens.should.deep.equal([ Token.Type("string"), Token.Keywords.This, - Token.Puncuation.OpenBracket, + Token.Punctuation.OpenBracket, Token.Type("int"), Token.Variables.Parameter("index"), - Token.Puncuation.CloseBracket, - Token.Puncuation.OpenBrace, + Token.Punctuation.CloseBracket, + Token.Punctuation.OpenBrace, Token.Keywords.Set, - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace]); + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/interfaces.test.syntax.ts b/test/syntaxes/interfaces.test.syntax.ts index 7a4f776af2..673f39bca6 100644 --- a/test/syntaxes/interfaces.test.syntax.ts +++ b/test/syntaxes/interfaces.test.syntax.ts @@ -18,8 +18,8 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Interface, Token.Identifiers.InterfaceName("IFoo"), - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace]); + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace]); }); it("interface inheritance", () => { @@ -34,14 +34,14 @@ interface IBar : IFoo { } 
tokens.should.deep.equal([ Token.Keywords.Interface, Token.Identifiers.InterfaceName("IFoo"), - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, Token.Keywords.Interface, Token.Identifiers.InterfaceName("IBar"), - Token.Puncuation.Colon, + Token.Punctuation.Colon, Token.Type("IFoo"), - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace]); + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace]); }); it("generic interface", () => { @@ -52,8 +52,8 @@ interface IBar : IFoo { } tokens.should.deep.equal([ Token.Keywords.Interface, Token.Identifiers.InterfaceName("IFoo"), - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace]); + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace]); }); it("generic interface with variance", () => { @@ -64,8 +64,8 @@ interface IBar : IFoo { } tokens.should.deep.equal([ Token.Keywords.Interface, Token.Identifiers.InterfaceName("IFoo"), - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace]); + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace]); }); it("generic interface with constraints", () => { @@ -78,10 +78,10 @@ interface IBar : IFoo { } Token.Identifiers.InterfaceName("IFoo"), Token.Keywords.Where, Token.Type("T1"), - Token.Puncuation.Colon, + Token.Punctuation.Colon, Token.Type("T2"), - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace]); + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/interpolated-strings.test.syntax.ts b/test/syntaxes/interpolated-strings.test.syntax.ts index d174b1271d..0bedfcd422 100644 --- a/test/syntaxes/interpolated-strings.test.syntax.ts +++ b/test/syntaxes/interpolated-strings.test.syntax.ts @@ -19,18 +19,18 @@ describe("Grammar", () => { Token.Type("string"), Token.Identifiers.FieldName("test"), Token.Operators.Assignment, - Token.Puncuation.InterpolatedString.Begin, + 
Token.Punctuation.InterpolatedString.Begin, Token.Literals.String("hello "), - Token.Puncuation.Interpolation.Begin, + Token.Punctuation.Interpolation.Begin, Token.Variables.ReadWrite("one"), - Token.Puncuation.Interpolation.End, + Token.Punctuation.Interpolation.End, Token.Literals.String(" world "), - Token.Puncuation.Interpolation.Begin, + Token.Punctuation.Interpolation.Begin, Token.Variables.ReadWrite("two"), - Token.Puncuation.Interpolation.End, + Token.Punctuation.Interpolation.End, Token.Literals.String("!"), - Token.Puncuation.InterpolatedString.End, - Token.Puncuation.Semicolon]); + Token.Punctuation.InterpolatedString.End, + Token.Punctuation.Semicolon]); }); it("no interpolations", () => { @@ -42,10 +42,10 @@ describe("Grammar", () => { Token.Type("string"), Token.Identifiers.FieldName("test"), Token.Operators.Assignment, - Token.Puncuation.InterpolatedString.Begin, + Token.Punctuation.InterpolatedString.Begin, Token.Literals.String("hello world!"), - Token.Puncuation.InterpolatedString.End, - Token.Puncuation.Semicolon]); + Token.Punctuation.InterpolatedString.End, + Token.Punctuation.Semicolon]); }); it("no interpolations due to escaped braces", () => { @@ -57,10 +57,10 @@ describe("Grammar", () => { Token.Type("string"), Token.Identifiers.FieldName("test"), Token.Operators.Assignment, - Token.Puncuation.InterpolatedString.Begin, + Token.Punctuation.InterpolatedString.Begin, Token.Literals.String("hello {{one}} world {{two}}!"), - Token.Puncuation.InterpolatedString.End, - Token.Puncuation.Semicolon]); + Token.Punctuation.InterpolatedString.End, + Token.Punctuation.Semicolon]); }); it("two interpolations with escaped braces", () => { @@ -72,20 +72,20 @@ describe("Grammar", () => { Token.Type("string"), Token.Identifiers.FieldName("test"), Token.Operators.Assignment, - Token.Puncuation.InterpolatedString.Begin, + Token.Punctuation.InterpolatedString.Begin, Token.Literals.String("hello "), Token.Literals.String("{{"), - 
Token.Puncuation.Interpolation.Begin, + Token.Punctuation.Interpolation.Begin, Token.Variables.ReadWrite("one"), - Token.Puncuation.Interpolation.End, + Token.Punctuation.Interpolation.End, Token.Literals.String("}} world "), Token.Literals.String("{{"), - Token.Puncuation.Interpolation.Begin, + Token.Punctuation.Interpolation.Begin, Token.Variables.ReadWrite("two"), - Token.Puncuation.Interpolation.End, + Token.Punctuation.Interpolation.End, Token.Literals.String("}}!"), - Token.Puncuation.InterpolatedString.End, - Token.Puncuation.Semicolon]); + Token.Punctuation.InterpolatedString.End, + Token.Punctuation.Semicolon]); }); it("no interpolations due to double-escaped braces", () => { @@ -97,10 +97,10 @@ describe("Grammar", () => { Token.Type("string"), Token.Identifiers.FieldName("test"), Token.Operators.Assignment, - Token.Puncuation.InterpolatedString.Begin, + Token.Punctuation.InterpolatedString.Begin, Token.Literals.String("hello {{{{one}}}} world {{{{two}}}}!"), - Token.Puncuation.InterpolatedString.End, - Token.Puncuation.Semicolon]); + Token.Punctuation.InterpolatedString.End, + Token.Punctuation.Semicolon]); }); it("break across two lines (non-verbatim)", () => { @@ -114,14 +114,14 @@ world!";`); Token.Type("string"), Token.Identifiers.FieldName("test"), Token.Operators.Assignment, - Token.Puncuation.InterpolatedString.Begin, + Token.Punctuation.InterpolatedString.Begin, Token.Literals.String("hell"), // Note: Because the string ended prematurely, the rest of this line and the contents of the next are junk. 
Token.IllegalNewLine("o"), Token.Variables.ReadWrite("world"), Token.Operators.Logical.Not, - Token.Puncuation.String.Begin, + Token.Punctuation.String.Begin, Token.IllegalNewLine(";")]); }); @@ -134,18 +134,18 @@ world!";`); Token.Type("string"), Token.Identifiers.FieldName("test"), Token.Operators.Assignment, - Token.Puncuation.InterpolatedString.VerbatimBegin, + Token.Punctuation.InterpolatedString.VerbatimBegin, Token.Literals.String("hello "), - Token.Puncuation.Interpolation.Begin, + Token.Punctuation.Interpolation.Begin, Token.Variables.ReadWrite("one"), - Token.Puncuation.Interpolation.End, + Token.Punctuation.Interpolation.End, Token.Literals.String(" world "), - Token.Puncuation.Interpolation.Begin, + Token.Punctuation.Interpolation.Begin, Token.Variables.ReadWrite("two"), - Token.Puncuation.Interpolation.End, + Token.Punctuation.Interpolation.End, Token.Literals.String("!"), - Token.Puncuation.InterpolatedString.End, - Token.Puncuation.Semicolon]); + Token.Punctuation.InterpolatedString.End, + Token.Punctuation.Semicolon]); }); it("verbatim with two interpolations and escaped double-quotes", () => { @@ -157,22 +157,22 @@ world!";`); Token.Type("string"), Token.Identifiers.FieldName("test"), Token.Operators.Assignment, - Token.Puncuation.InterpolatedString.VerbatimBegin, + Token.Punctuation.InterpolatedString.VerbatimBegin, Token.Literals.String("hello "), - Token.Puncuation.Interpolation.Begin, + Token.Punctuation.Interpolation.Begin, Token.Variables.ReadWrite("one"), - Token.Puncuation.Interpolation.End, + Token.Punctuation.Interpolation.End, Token.Literals.String(" "), Token.Literals.CharacterEscape("\"\""), Token.Literals.String("world"), Token.Literals.CharacterEscape("\"\""), Token.Literals.String(" "), - Token.Puncuation.Interpolation.Begin, + Token.Punctuation.Interpolation.Begin, Token.Variables.ReadWrite("two"), - Token.Puncuation.Interpolation.End, + Token.Punctuation.Interpolation.End, Token.Literals.String("!"), - 
Token.Puncuation.InterpolatedString.End, - Token.Puncuation.Semicolon]); + Token.Punctuation.InterpolatedString.End, + Token.Punctuation.Semicolon]); }); it("break across two lines with two interpolations (verbatim)", () => { @@ -186,18 +186,18 @@ world {two}!";`); Token.Type("string"), Token.Identifiers.FieldName("test"), Token.Operators.Assignment, - Token.Puncuation.InterpolatedString.VerbatimBegin, + Token.Punctuation.InterpolatedString.VerbatimBegin, Token.Literals.String("hello "), - Token.Puncuation.Interpolation.Begin, + Token.Punctuation.Interpolation.Begin, Token.Variables.ReadWrite("one"), - Token.Puncuation.Interpolation.End, + Token.Punctuation.Interpolation.End, Token.Literals.String("world "), - Token.Puncuation.Interpolation.Begin, + Token.Punctuation.Interpolation.Begin, Token.Variables.ReadWrite("two"), - Token.Puncuation.Interpolation.End, + Token.Punctuation.Interpolation.End, Token.Literals.String("!"), - Token.Puncuation.InterpolatedString.End, - Token.Puncuation.Semicolon]); + Token.Punctuation.InterpolatedString.End, + Token.Punctuation.Semicolon]); }); it("break across two lines with no interpolations (verbatim)", () => { @@ -211,11 +211,11 @@ world!";`); Token.Type("string"), Token.Identifiers.FieldName("test"), Token.Operators.Assignment, - Token.Puncuation.InterpolatedString.VerbatimBegin, + Token.Punctuation.InterpolatedString.VerbatimBegin, Token.Literals.String("hello"), Token.Literals.String("world!"), - Token.Puncuation.InterpolatedString.End, - Token.Puncuation.Semicolon]); + Token.Punctuation.InterpolatedString.End, + Token.Punctuation.Semicolon]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/invocation-expressions.test.syntax.ts b/test/syntaxes/invocation-expressions.test.syntax.ts index 8de6d0a0f0..c5c662768e 100644 --- a/test/syntaxes/invocation-expressions.test.syntax.ts +++ b/test/syntaxes/invocation-expressions.test.syntax.ts @@ -16,9 +16,9 @@ describe("Grammar", () => { tokens.should.deep.equal([ 
Token.Identifiers.MethodName("M"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon ]); }); @@ -28,10 +28,10 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Identifiers.MethodName("M"), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Literals.Numeric.Decimal("42"), - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon ]); }); @@ -41,12 +41,12 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Identifiers.MethodName("M"), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Literals.Numeric.Decimal("19"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Literals.Numeric.Decimal("23"), - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon ]); }); @@ -56,16 +56,16 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Identifiers.MethodName("M"), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Variables.Parameter("x"), - Token.Puncuation.Colon, + Token.Punctuation.Colon, Token.Literals.Numeric.Decimal("19"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Variables.Parameter("y"), - Token.Puncuation.Colon, + Token.Punctuation.Colon, Token.Literals.Numeric.Decimal("23"), - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon ]); }); @@ -75,11 +75,11 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Identifiers.MethodName("M"), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Keywords.Modifiers.Ref, Token.Variables.ReadWrite("x"), - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon ]); }); @@ -89,11 +89,11 @@ describe("Grammar", 
() => { tokens.should.deep.equal([ Token.Identifiers.MethodName("M"), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Keywords.Modifiers.Out, Token.Variables.ReadWrite("x"), - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon ]); }); @@ -103,12 +103,12 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Identifiers.MethodName("M"), - Token.Puncuation.TypeParameters.Begin, + Token.Punctuation.TypeParameters.Begin, Token.Type("int"), - Token.Puncuation.TypeParameters.End, - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon + Token.Punctuation.TypeParameters.End, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon ]); }); @@ -118,15 +118,15 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Identifiers.MethodName("M"), - Token.Puncuation.TypeParameters.Begin, + Token.Punctuation.TypeParameters.Begin, Token.Type("T"), - Token.Puncuation.TypeParameters.Begin, + Token.Punctuation.TypeParameters.Begin, Token.Type("int"), - Token.Puncuation.TypeParameters.End, - Token.Puncuation.TypeParameters.End, - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon + Token.Punctuation.TypeParameters.End, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon ]); }); @@ -136,18 +136,18 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Identifiers.MethodName("M"), - Token.Puncuation.TypeParameters.Begin, + Token.Punctuation.TypeParameters.Begin, Token.Type("T"), - Token.Puncuation.TypeParameters.Begin, + Token.Punctuation.TypeParameters.Begin, Token.Type("U"), - Token.Puncuation.TypeParameters.Begin, + Token.Punctuation.TypeParameters.Begin, Token.Type("int"), - Token.Puncuation.TypeParameters.End, - Token.Puncuation.TypeParameters.End, - 
Token.Puncuation.TypeParameters.End, - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon + Token.Punctuation.TypeParameters.End, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon ]); }); @@ -157,14 +157,14 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Variables.Object("C"), - Token.Puncuation.TypeParameters.Begin, + Token.Punctuation.TypeParameters.Begin, Token.Type("int"), - Token.Puncuation.TypeParameters.End, - Token.Puncuation.Accessor, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.Accessor, Token.Identifiers.MethodName("M"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon ]); }); @@ -174,16 +174,16 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Variables.Object("N"), - Token.Puncuation.Accessor, + Token.Punctuation.Accessor, Token.Variables.Object("C"), - Token.Puncuation.TypeParameters.Begin, + Token.Punctuation.TypeParameters.Begin, Token.Type("int"), - Token.Puncuation.TypeParameters.End, - Token.Puncuation.Accessor, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.Accessor, Token.Identifiers.MethodName("M"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon ]); }); @@ -196,16 +196,16 @@ describe("Grammar", () => { Token.Variables.Local("o"), Token.Operators.Assignment, Token.Variables.Object("N"), - Token.Puncuation.Accessor, + Token.Punctuation.Accessor, Token.Variables.Object("C"), - Token.Puncuation.TypeParameters.Begin, + Token.Punctuation.TypeParameters.Begin, Token.Type("int"), - Token.Puncuation.TypeParameters.End, - Token.Puncuation.Accessor, + 
Token.Punctuation.TypeParameters.End, + Token.Punctuation.Accessor, Token.Identifiers.MethodName("M"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon ]); }); @@ -218,16 +218,16 @@ describe("Grammar", () => { Token.Variables.Local("o"), Token.Operators.Assignment, Token.Identifiers.MethodName("M"), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Variables.Parameter("x"), - Token.Puncuation.Colon, + Token.Punctuation.Colon, Token.Literals.Numeric.Decimal("19"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Variables.Parameter("y"), - Token.Puncuation.Colon, + Token.Punctuation.Colon, Token.Literals.Numeric.Decimal("23"), - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon ]); }); }); diff --git a/test/syntaxes/iteration-statements.test.syntax.ts b/test/syntaxes/iteration-statements.test.syntax.ts index 324b446540..9e8a418908 100644 --- a/test/syntaxes/iteration-statements.test.syntax.ts +++ b/test/syntaxes/iteration-statements.test.syntax.ts @@ -16,11 +16,11 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.While, - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Literals.Boolean.True, - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace ]); }); @@ -31,13 +31,13 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Do, - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, Token.Keywords.While, - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Literals.Boolean.True, - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon + Token.Punctuation.CloseParen, + 
Token.Punctuation.Semicolon ]); }); @@ -48,21 +48,21 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.For, - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("int"), Token.Variables.Local("i"), Token.Operators.Assignment, Token.Literals.Numeric.Decimal("0"), - Token.Puncuation.Semicolon, + Token.Punctuation.Semicolon, Token.Variables.ReadWrite("i"), Token.Operators.Relational.LessThan, Token.Literals.Numeric.Decimal("42"), - Token.Puncuation.Semicolon, + Token.Punctuation.Semicolon, Token.Variables.ReadWrite("i"), Token.Operators.Increment, - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, ]); }); @@ -77,23 +77,23 @@ for (int i = 0; i < 42; i++) tokens.should.deep.equal([ Token.Keywords.For, - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("int"), Token.Variables.Local("i"), Token.Operators.Assignment, Token.Literals.Numeric.Decimal("0"), - Token.Puncuation.Semicolon, + Token.Punctuation.Semicolon, Token.Variables.ReadWrite("i"), Token.Operators.Relational.LessThan, Token.Literals.Numeric.Decimal("42"), - Token.Puncuation.Semicolon, + Token.Punctuation.Semicolon, Token.Variables.ReadWrite("i"), Token.Operators.Increment, - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, Token.Keywords.Break, - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace, + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace, ]); }); @@ -108,23 +108,23 @@ for (int i = 0; i < 42; i++) tokens.should.deep.equal([ Token.Keywords.For, - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("int"), Token.Variables.Local("i"), Token.Operators.Assignment, Token.Literals.Numeric.Decimal("0"), - Token.Puncuation.Semicolon, + Token.Punctuation.Semicolon, Token.Variables.ReadWrite("i"), 
Token.Operators.Relational.LessThan, Token.Literals.Numeric.Decimal("42"), - Token.Puncuation.Semicolon, + Token.Punctuation.Semicolon, Token.Variables.ReadWrite("i"), Token.Operators.Increment, - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, Token.Keywords.Continue, - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace, + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace, ]); }); @@ -135,14 +135,14 @@ for (int i = 0; i < 42; i++) tokens.should.deep.equal([ Token.Keywords.ForEach, - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("int"), Token.Variables.Local("i"), Token.Keywords.In, Token.Variables.ReadWrite("numbers"), - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, ]); }); }); diff --git a/test/syntaxes/labels.test.syntax.ts b/test/syntaxes/labels.test.syntax.ts index c2c851bc5e..fc9d3c31f8 100644 --- a/test/syntaxes/labels.test.syntax.ts +++ b/test/syntaxes/labels.test.syntax.ts @@ -16,7 +16,7 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Identifiers.LabelName("Foo"), - Token.Puncuation.Colon + Token.Punctuation.Colon ]); }); }); diff --git a/test/syntaxes/locals.test.syntax.ts b/test/syntaxes/locals.test.syntax.ts index 67c958cd20..521573ecc3 100644 --- a/test/syntaxes/locals.test.syntax.ts +++ b/test/syntaxes/locals.test.syntax.ts @@ -17,7 +17,7 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Type("int"), Token.Variables.Local("x"), - Token.Puncuation.Semicolon + Token.Punctuation.Semicolon ]); }); @@ -30,7 +30,7 @@ describe("Grammar", () => { Token.Variables.Local("x"), Token.Operators.Assignment, Token.Literals.Numeric.Decimal("42"), - Token.Puncuation.Semicolon + Token.Punctuation.Semicolon ]); }); @@ -41,9 +41,9 @@ describe("Grammar", () => { tokens.should.deep.equal([ 
Token.Type("int"), Token.Variables.Local("x"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Variables.Local("y"), - Token.Puncuation.Semicolon + Token.Punctuation.Semicolon ]); }); @@ -56,11 +56,11 @@ describe("Grammar", () => { Token.Variables.Local("x"), Token.Operators.Assignment, Token.Literals.Numeric.Decimal("19"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Variables.Local("y"), Token.Operators.Assignment, Token.Literals.Numeric.Decimal("23"), - Token.Puncuation.Semicolon + Token.Punctuation.Semicolon ]); }); @@ -74,7 +74,7 @@ describe("Grammar", () => { Token.Variables.Local("x"), Token.Operators.Assignment, Token.Literals.Numeric.Decimal("42"), - Token.Puncuation.Semicolon + Token.Punctuation.Semicolon ]); }); @@ -88,11 +88,11 @@ describe("Grammar", () => { Token.Variables.Local("x"), Token.Operators.Assignment, Token.Literals.Numeric.Decimal("19"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Variables.Local("y"), Token.Operators.Assignment, Token.Literals.Numeric.Decimal("23"), - Token.Puncuation.Semicolon + Token.Punctuation.Semicolon ]); }); }); diff --git a/test/syntaxes/lock-statements.test.syntax.ts b/test/syntaxes/lock-statements.test.syntax.ts index e48e353e90..5df41dd6b5 100644 --- a/test/syntaxes/lock-statements.test.syntax.ts +++ b/test/syntaxes/lock-statements.test.syntax.ts @@ -16,16 +16,16 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Lock, - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Keywords.New, Token.Type("object"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.CloseParen, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.CloseParen, Token.Identifiers.MethodName("Do"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon ]); }); @@ -35,18 +35,18 @@ 
describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Lock, - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Keywords.New, Token.Type("object"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, Token.Identifiers.MethodName("Do"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace ]); }); @@ -58,16 +58,16 @@ lock (new object()) tokens.should.deep.equal([ Token.Keywords.Lock, - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Keywords.New, Token.Type("object"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.CloseParen, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.CloseParen, Token.Identifiers.MethodName("Do"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon ]); }); @@ -81,18 +81,18 @@ lock (new object()) tokens.should.deep.equal([ Token.Keywords.Lock, - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Keywords.New, Token.Type("object"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, Token.Identifiers.MethodName("Do"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + 
Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace ]); }); }); diff --git a/test/syntaxes/methods.test.syntax.ts b/test/syntaxes/methods.test.syntax.ts index 6d6062dcdf..db9dcf9775 100644 --- a/test/syntaxes/methods.test.syntax.ts +++ b/test/syntaxes/methods.test.syntax.ts @@ -18,10 +18,10 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Type("void"), Token.Identifiers.MethodName("Foo"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace]); + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace]); }); it("declaration with two parameters", () => { @@ -36,20 +36,20 @@ int Add(int x, int y) tokens.should.deep.equal([ Token.Type("int"), Token.Identifiers.MethodName("Add"), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("int"), Token.Variables.Parameter("x"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Type("int"), Token.Variables.Parameter("y"), - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, Token.Keywords.Return, Token.Variables.ReadWrite("x"), Token.Operators.Arithmetic.Addition, Token.Variables.ReadWrite("y"), - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace]); + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace]); }); it("declaration in with generic constraints", () => { @@ -60,16 +60,16 @@ int Add(int x, int y) tokens.should.deep.equal([ Token.Type("TResult"), Token.Identifiers.MethodName("GetString"), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("T"), Token.Variables.Parameter("arg"), - Token.Puncuation.CloseParen, + Token.Punctuation.CloseParen, Token.Keywords.Where, Token.Type("T"), - Token.Puncuation.Colon, + Token.Punctuation.Colon, Token.Type("TResult"), - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace]); + 
Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace]); }); it("expression body", () => { @@ -80,18 +80,18 @@ int Add(int x, int y) tokens.should.deep.equal([ Token.Type("int"), Token.Identifiers.MethodName("Add"), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("int"), Token.Variables.Parameter("x"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Type("int"), Token.Variables.Parameter("y"), - Token.Puncuation.CloseParen, + Token.Punctuation.CloseParen, Token.Operators.Arrow, Token.Variables.ReadWrite("x"), Token.Operators.Arithmetic.Addition, Token.Variables.ReadWrite("y"), - Token.Puncuation.Semicolon]); + Token.Punctuation.Semicolon]); }); it("explicitly-implemented interface member", () => { @@ -102,14 +102,14 @@ int Add(int x, int y) tokens.should.deep.equal([ Token.Type("string"), Token.Type("IFoo"), - Token.Puncuation.TypeParameters.Begin, + Token.Punctuation.TypeParameters.Begin, Token.Type("string"), - Token.Puncuation.TypeParameters.End, - Token.Puncuation.Accessor, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.Accessor, Token.Identifiers.MethodName("GetString"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon]); + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon]); }); it("declaration in interface", () => { @@ -120,9 +120,9 @@ int Add(int x, int y) tokens.should.deep.equal([ Token.Type("string"), Token.Identifiers.MethodName("GetString"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon]); + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon]); }); it("declaration in interface with parameters", () => { @@ -133,17 +133,17 @@ int Add(int x, int y) tokens.should.deep.equal([ Token.Type("string"), Token.Identifiers.MethodName("GetString"), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("string"), 
Token.Variables.Parameter("format"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Keywords.Modifiers.Params, Token.Type("object"), - Token.Puncuation.OpenBracket, - Token.Puncuation.CloseBracket, + Token.Punctuation.OpenBracket, + Token.Punctuation.CloseBracket, Token.Variables.Parameter("args"), - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon]); + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon]); }); it("declaration in interface with generic constraints", () => { @@ -154,15 +154,15 @@ int Add(int x, int y) tokens.should.deep.equal([ Token.Type("TResult"), Token.Identifiers.MethodName("GetString"), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("T"), Token.Variables.Parameter("arg"), - Token.Puncuation.CloseParen, + Token.Punctuation.CloseParen, Token.Keywords.Where, Token.Type("T"), - Token.Puncuation.Colon, + Token.Punctuation.Colon, Token.Type("TResult"), - Token.Puncuation.Semicolon]); + Token.Punctuation.Semicolon]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/namespaces.test.syntax.ts b/test/syntaxes/namespaces.test.syntax.ts index 6f306dca3e..84c8d766dc 100644 --- a/test/syntaxes/namespaces.test.syntax.ts +++ b/test/syntaxes/namespaces.test.syntax.ts @@ -21,8 +21,8 @@ namespace TestNamespace tokens.should.deep.equal([ Token.Keywords.Namespace, Token.Identifiers.NamespaceName("TestNamespace"), - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace]); + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace]); }); it("has a namespace keyword and a dotted name", () => { @@ -36,10 +36,10 @@ namespace Test.Namespace tokens.should.deep.equal([ Token.Keywords.Namespace, Token.Identifiers.NamespaceName("Test"), - Token.Puncuation.Accessor, + Token.Punctuation.Accessor, Token.Identifiers.NamespaceName("Namespace"), - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace]); + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace]); }); it("can be nested", () => { 
@@ -56,14 +56,14 @@ namespace TestNamespace tokens.should.deep.equal([ Token.Keywords.Namespace, Token.Identifiers.NamespaceName("TestNamespace"), - Token.Puncuation.OpenBrace, + Token.Punctuation.OpenBrace, Token.Keywords.Namespace, Token.Identifiers.NamespaceName("NestedNamespace"), - Token.Puncuation.OpenBrace, + Token.Punctuation.OpenBrace, - Token.Puncuation.CloseBrace, - Token.Puncuation.CloseBrace]); + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseBrace]); }); it("can contain using statements", () => { @@ -88,50 +88,50 @@ namespace TestNamespace tokens.should.deep.equal([ Token.Keywords.Using, Token.Identifiers.NamespaceName("UsingOne"), - Token.Puncuation.Semicolon, + Token.Punctuation.Semicolon, Token.Keywords.Using, Token.Identifiers.AliasName("one"), Token.Operators.Assignment, Token.Type("UsingOne"), - Token.Puncuation.Accessor, + Token.Punctuation.Accessor, Token.Type("Something"), - Token.Puncuation.Semicolon, + Token.Punctuation.Semicolon, Token.Keywords.Namespace, Token.Identifiers.NamespaceName("TestNamespace"), - Token.Puncuation.OpenBrace, + Token.Punctuation.OpenBrace, Token.Keywords.Using, Token.Identifiers.NamespaceName("UsingTwo"), - Token.Puncuation.Semicolon, + Token.Punctuation.Semicolon, Token.Keywords.Using, Token.Identifiers.AliasName("two"), Token.Operators.Assignment, Token.Type("UsingTwo"), - Token.Puncuation.Accessor, + Token.Punctuation.Accessor, Token.Type("Something"), - Token.Puncuation.Semicolon, + Token.Punctuation.Semicolon, Token.Keywords.Namespace, Token.Identifiers.NamespaceName("NestedNamespace"), - Token.Puncuation.OpenBrace, + Token.Punctuation.OpenBrace, Token.Keywords.Using, Token.Identifiers.NamespaceName("UsingThree"), - Token.Puncuation.Semicolon, + Token.Punctuation.Semicolon, Token.Keywords.Using, Token.Identifiers.AliasName("three"), Token.Operators.Assignment, Token.Type("UsingThree"), - Token.Puncuation.Accessor, + Token.Punctuation.Accessor, Token.Type("Something"), - Token.Puncuation.Semicolon, + 
Token.Punctuation.Semicolon, - Token.Puncuation.CloseBrace, - Token.Puncuation.CloseBrace]); + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseBrace]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/numeric-literals.test.syntax.ts b/test/syntaxes/numeric-literals.test.syntax.ts index c0c6467a83..095c44358b 100644 --- a/test/syntaxes/numeric-literals.test.syntax.ts +++ b/test/syntaxes/numeric-literals.test.syntax.ts @@ -20,7 +20,7 @@ describe("Grammar", () => { Token.Identifiers.FieldName("x"), Token.Operators.Assignment, Token.Literals.Numeric.Decimal("0"), - Token.Puncuation.Semicolon]); + Token.Punctuation.Semicolon]); }); it("hexadecimal zero", () => { @@ -33,7 +33,7 @@ describe("Grammar", () => { Token.Identifiers.FieldName("x"), Token.Operators.Assignment, Token.Literals.Numeric.Hexadecimal("0x0"), - Token.Puncuation.Semicolon]); + Token.Punctuation.Semicolon]); }); it("binary zero", () => { @@ -46,7 +46,7 @@ describe("Grammar", () => { Token.Identifiers.FieldName("x"), Token.Operators.Assignment, Token.Literals.Numeric.Binary("0b0"), - Token.Puncuation.Semicolon]); + Token.Punctuation.Semicolon]); }); it("floating-point zero", () => { @@ -59,7 +59,7 @@ describe("Grammar", () => { Token.Identifiers.FieldName("x"), Token.Operators.Assignment, Token.Literals.Numeric.Decimal("0.0"), - Token.Puncuation.Semicolon]); + Token.Punctuation.Semicolon]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/operators.test.syntax.ts b/test/syntaxes/operators.test.syntax.ts index a90e3a4031..ae34d71921 100644 --- a/test/syntaxes/operators.test.syntax.ts +++ b/test/syntaxes/operators.test.syntax.ts @@ -21,16 +21,16 @@ describe("Grammar", () => { Token.Type("int"), Token.Keywords.Operator, Token.Identifiers.MethodName("+"), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("int"), Token.Variables.Parameter("value"), - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, + Token.Punctuation.CloseParen, + 
Token.Punctuation.OpenBrace, Token.Keywords.Return, Token.Operators.Arithmetic.Addition, Token.Variables.ReadWrite("value"), - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace]); + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace]); }); it("unary -", () => { @@ -44,16 +44,16 @@ describe("Grammar", () => { Token.Type("int"), Token.Keywords.Operator, Token.Identifiers.MethodName("-"), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("int"), Token.Variables.Parameter("value"), - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, Token.Keywords.Return, Token.Operators.Arithmetic.Subtraction, Token.Variables.ReadWrite("value"), - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace]); + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace]); }); it("unary !", () => { @@ -67,17 +67,17 @@ describe("Grammar", () => { Token.Type("bool"), Token.Keywords.Operator, Token.Identifiers.MethodName("!"), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("int"), Token.Variables.Parameter("value"), - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, Token.Keywords.Return, Token.Variables.ReadWrite("value"), Token.Operators.Relational.Equals, Token.Literals.Numeric.Decimal("0"), - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace]); + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace]); }); it("unary ~", () => { @@ -91,16 +91,16 @@ describe("Grammar", () => { Token.Type("int"), Token.Keywords.Operator, Token.Identifiers.MethodName("~"), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("int"), Token.Variables.Parameter("value"), - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, Token.Keywords.Return, Token.Operators.Bitwise.BitwiseComplement, 
Token.Variables.ReadWrite("value"), - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace]); + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace]); }); it("unary ++", () => { @@ -114,16 +114,16 @@ describe("Grammar", () => { Token.Type("int"), Token.Keywords.Operator, Token.Identifiers.MethodName("++"), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("int"), Token.Variables.Parameter("value"), - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, Token.Keywords.Return, Token.Operators.Increment, Token.Variables.ReadWrite("value"), - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace]); + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace]); }); it("unary --", () => { @@ -137,16 +137,16 @@ describe("Grammar", () => { Token.Type("int"), Token.Keywords.Operator, Token.Identifiers.MethodName("--"), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("int"), Token.Variables.Parameter("value"), - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, Token.Keywords.Return, Token.Operators.Decrement, Token.Variables.ReadWrite("value"), - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace]); + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace]); }); it("unary true", () => { @@ -160,17 +160,17 @@ describe("Grammar", () => { Token.Type("int"), Token.Keywords.Operator, Token.Identifiers.MethodName("true"), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("int"), Token.Variables.Parameter("value"), - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, Token.Keywords.Return, Token.Variables.ReadWrite("value"), Token.Operators.Relational.NotEqual, Token.Literals.Numeric.Decimal("0"), - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace]); + 
Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace]); }); it("unary false", () => { @@ -184,17 +184,17 @@ describe("Grammar", () => { Token.Type("int"), Token.Keywords.Operator, Token.Identifiers.MethodName("false"), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("int"), Token.Variables.Parameter("value"), - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, Token.Keywords.Return, Token.Variables.ReadWrite("value"), Token.Operators.Relational.Equals, Token.Literals.Numeric.Decimal("0"), - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace]); + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace]); }); it("binary +", () => { @@ -208,20 +208,20 @@ describe("Grammar", () => { Token.Type("int"), Token.Keywords.Operator, Token.Identifiers.MethodName("+"), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("int"), Token.Variables.Parameter("x"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Type("int"), Token.Variables.Parameter("y"), - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, Token.Keywords.Return, Token.Variables.ReadWrite("x"), Token.Operators.Arithmetic.Addition, Token.Variables.ReadWrite("y"), - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace]); + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace]); }); it("binary -", () => { @@ -235,20 +235,20 @@ describe("Grammar", () => { Token.Type("int"), Token.Keywords.Operator, Token.Identifiers.MethodName("-"), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("int"), Token.Variables.Parameter("x"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Type("int"), Token.Variables.Parameter("y"), - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, Token.Keywords.Return, 
Token.Variables.ReadWrite("x"), Token.Operators.Arithmetic.Subtraction, Token.Variables.ReadWrite("y"), - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace]); + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace]); }); it("binary *", () => { @@ -262,20 +262,20 @@ describe("Grammar", () => { Token.Type("int"), Token.Keywords.Operator, Token.Identifiers.MethodName("*"), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("int"), Token.Variables.Parameter("x"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Type("int"), Token.Variables.Parameter("y"), - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, Token.Keywords.Return, Token.Variables.ReadWrite("x"), Token.Operators.Arithmetic.Multiplication, Token.Variables.ReadWrite("y"), - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace]); + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace]); }); it("binary /", () => { @@ -289,20 +289,20 @@ describe("Grammar", () => { Token.Type("int"), Token.Keywords.Operator, Token.Identifiers.MethodName("/"), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("int"), Token.Variables.Parameter("x"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Type("int"), Token.Variables.Parameter("y"), - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, Token.Keywords.Return, Token.Variables.ReadWrite("x"), Token.Operators.Arithmetic.Division, Token.Variables.ReadWrite("y"), - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace]); + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace]); }); it("binary %", () => { @@ -316,20 +316,20 @@ describe("Grammar", () => { Token.Type("int"), Token.Keywords.Operator, Token.Identifiers.MethodName("%"), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("int"), 
Token.Variables.Parameter("x"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Type("int"), Token.Variables.Parameter("y"), - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, Token.Keywords.Return, Token.Variables.ReadWrite("x"), Token.Operators.Arithmetic.Remainder, Token.Variables.ReadWrite("y"), - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace]); + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace]); }); it("binary &", () => { @@ -343,20 +343,20 @@ describe("Grammar", () => { Token.Type("int"), Token.Keywords.Operator, Token.Identifiers.MethodName("&"), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("int"), Token.Variables.Parameter("x"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Type("int"), Token.Variables.Parameter("y"), - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, Token.Keywords.Return, Token.Variables.ReadWrite("x"), Token.Operators.Bitwise.And, Token.Variables.ReadWrite("y"), - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace]); + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace]); }); it("binary |", () => { @@ -370,20 +370,20 @@ describe("Grammar", () => { Token.Type("int"), Token.Keywords.Operator, Token.Identifiers.MethodName("|"), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("int"), Token.Variables.Parameter("x"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Type("int"), Token.Variables.Parameter("y"), - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, Token.Keywords.Return, Token.Variables.ReadWrite("x"), Token.Operators.Bitwise.Or, Token.Variables.ReadWrite("y"), - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace]); + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace]); }); 
it("binary ^", () => { @@ -397,20 +397,20 @@ describe("Grammar", () => { Token.Type("int"), Token.Keywords.Operator, Token.Identifiers.MethodName("^"), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("int"), Token.Variables.Parameter("x"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Type("int"), Token.Variables.Parameter("y"), - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, Token.Keywords.Return, Token.Variables.ReadWrite("x"), Token.Operators.Bitwise.ExclusiveOr, Token.Variables.ReadWrite("y"), - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace]); + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace]); }); it("binary <<", () => { @@ -424,20 +424,20 @@ describe("Grammar", () => { Token.Type("int"), Token.Keywords.Operator, Token.Identifiers.MethodName("<<"), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("int"), Token.Variables.Parameter("x"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Type("int"), Token.Variables.Parameter("y"), - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, Token.Keywords.Return, Token.Variables.ReadWrite("x"), Token.Operators.Bitwise.ShiftLeft, Token.Variables.ReadWrite("y"), - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace]); + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace]); }); it("binary >>", () => { @@ -451,20 +451,20 @@ describe("Grammar", () => { Token.Type("int"), Token.Keywords.Operator, Token.Identifiers.MethodName(">>"), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("int"), Token.Variables.Parameter("x"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Type("int"), Token.Variables.Parameter("y"), - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, 
Token.Keywords.Return, Token.Variables.ReadWrite("x"), Token.Operators.Bitwise.ShiftRight, Token.Variables.ReadWrite("y"), - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace]); + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace]); }); it("binary ==", () => { @@ -478,20 +478,20 @@ describe("Grammar", () => { Token.Type("bool"), Token.Keywords.Operator, Token.Identifiers.MethodName("=="), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("int"), Token.Variables.Parameter("x"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Type("int"), Token.Variables.Parameter("y"), - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, Token.Keywords.Return, Token.Variables.ReadWrite("x"), Token.Operators.Relational.Equals, Token.Variables.ReadWrite("y"), - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace]); + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace]); }); it("binary !=", () => { @@ -505,20 +505,20 @@ describe("Grammar", () => { Token.Type("bool"), Token.Keywords.Operator, Token.Identifiers.MethodName("!="), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("int"), Token.Variables.Parameter("x"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Type("int"), Token.Variables.Parameter("y"), - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, Token.Keywords.Return, Token.Variables.ReadWrite("x"), Token.Operators.Relational.NotEqual, Token.Variables.ReadWrite("y"), - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace]); + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace]); }); it("binary >", () => { @@ -532,20 +532,20 @@ describe("Grammar", () => { Token.Type("bool"), Token.Keywords.Operator, Token.Identifiers.MethodName(">"), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("int"), 
Token.Variables.Parameter("x"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Type("int"), Token.Variables.Parameter("y"), - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, Token.Keywords.Return, Token.Variables.ReadWrite("x"), Token.Operators.Relational.GreaterThan, Token.Variables.ReadWrite("y"), - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace]); + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace]); }); it("binary <", () => { @@ -559,20 +559,20 @@ describe("Grammar", () => { Token.Type("bool"), Token.Keywords.Operator, Token.Identifiers.MethodName("<"), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("int"), Token.Variables.Parameter("x"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Type("int"), Token.Variables.Parameter("y"), - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, Token.Keywords.Return, Token.Variables.ReadWrite("x"), Token.Operators.Relational.LessThan, Token.Variables.ReadWrite("y"), - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace]); + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace]); }); it("binary >=", () => { @@ -586,20 +586,20 @@ describe("Grammar", () => { Token.Type("bool"), Token.Keywords.Operator, Token.Identifiers.MethodName(">="), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("int"), Token.Variables.Parameter("x"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Type("int"), Token.Variables.Parameter("y"), - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, Token.Keywords.Return, Token.Variables.ReadWrite("x"), Token.Operators.Relational.GreaterThanOrEqual, Token.Variables.ReadWrite("y"), - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace]); + Token.Punctuation.Semicolon, + 
Token.Punctuation.CloseBrace]); }); it("binary <=", () => { @@ -613,20 +613,20 @@ describe("Grammar", () => { Token.Type("bool"), Token.Keywords.Operator, Token.Identifiers.MethodName("<="), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("int"), Token.Variables.Parameter("x"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Type("int"), Token.Variables.Parameter("y"), - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, Token.Keywords.Return, Token.Variables.ReadWrite("x"), Token.Operators.Relational.LessThanOrEqual, Token.Variables.ReadWrite("y"), - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace]); + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace]); }); it("implicit conversion", () => { @@ -640,17 +640,17 @@ describe("Grammar", () => { Token.Keywords.Implicit, Token.Keywords.Operator, Token.Type("bool"), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("int"), Token.Variables.Parameter("x"), - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, Token.Keywords.Return, Token.Variables.ReadWrite("x"), Token.Operators.Relational.NotEqual, Token.Literals.Numeric.Decimal("0"), - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace]); + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace]); }); it("explicit conversion", () => { @@ -664,17 +664,17 @@ describe("Grammar", () => { Token.Keywords.Explicit, Token.Keywords.Operator, Token.Type("bool"), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("int"), Token.Variables.Parameter("x"), - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, Token.Keywords.Return, Token.Variables.ReadWrite("x"), Token.Operators.Relational.NotEqual, Token.Literals.Numeric.Decimal("0"), - Token.Puncuation.Semicolon, - 
Token.Puncuation.CloseBrace]); + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace]); }); it("with expression body", () => { @@ -688,14 +688,14 @@ describe("Grammar", () => { Token.Type("int"), Token.Keywords.Operator, Token.Identifiers.MethodName("+"), - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("int"), Token.Variables.Parameter("value"), - Token.Puncuation.CloseParen, + Token.Punctuation.CloseParen, Token.Operators.Arrow, Token.Operators.Arithmetic.Addition, Token.Variables.ReadWrite("value"), - Token.Puncuation.Semicolon]); + Token.Punctuation.Semicolon]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/properties.test.syntax.ts b/test/syntaxes/properties.test.syntax.ts index 7790ac2b83..fc299ab901 100644 --- a/test/syntaxes/properties.test.syntax.ts +++ b/test/syntaxes/properties.test.syntax.ts @@ -24,21 +24,21 @@ public IBooom Property Token.Keywords.Modifiers.Public, Token.Type("IBooom"), Token.Identifiers.PropertyName("Property"), - Token.Puncuation.OpenBrace, + Token.Punctuation.OpenBrace, Token.Keywords.Get, - Token.Puncuation.OpenBrace, + Token.Punctuation.OpenBrace, Token.Keywords.Return, Token.Literals.Null, - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace, + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace, Token.Keywords.Set, - Token.Puncuation.OpenBrace, + Token.Punctuation.OpenBrace, Token.Variables.ReadWrite("something"), Token.Operators.Assignment, Token.Variables.ReadWrite("value"), - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace, - Token.Puncuation.CloseBrace]); + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseBrace]); }); it("declaration single line", () => { @@ -50,22 +50,22 @@ public IBooom Property Token.Keywords.Modifiers.Public, Token.Type("IBooom"), Token.Identifiers.PropertyName("Property"), - Token.Puncuation.OpenBrace, + Token.Punctuation.OpenBrace, Token.Keywords.Get, - Token.Puncuation.OpenBrace, + 
Token.Punctuation.OpenBrace, Token.Keywords.Return, Token.Literals.Null, - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace, + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace, Token.Keywords.Modifiers.Private, Token.Keywords.Set, - Token.Puncuation.OpenBrace, + Token.Punctuation.OpenBrace, Token.Variables.ReadWrite("something"), Token.Operators.Assignment, Token.Variables.ReadWrite("value"), - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace, - Token.Puncuation.CloseBrace]); + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseBrace]); }); it("declaration without modifiers", () => { @@ -76,12 +76,12 @@ public IBooom Property tokens.should.deep.equal([ Token.Type("IBooom"), Token.Identifiers.PropertyName("Property"), - Token.Puncuation.OpenBrace, + Token.Punctuation.OpenBrace, Token.Keywords.Get, - Token.Puncuation.Semicolon, + Token.Punctuation.Semicolon, Token.Keywords.Set, - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace]); + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace]); }); it("auto-property single line", function () { @@ -93,12 +93,12 @@ public IBooom Property Token.Keywords.Modifiers.Public, Token.Type("IBooom"), Token.Identifiers.PropertyName("Property"), - Token.Puncuation.OpenBrace, + Token.Punctuation.OpenBrace, Token.Keywords.Get, - Token.Puncuation.Semicolon, + Token.Punctuation.Semicolon, Token.Keywords.Set, - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace]); + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace]); }); it("auto-property single line (protected internal)", function () { @@ -111,12 +111,12 @@ public IBooom Property Token.Keywords.Modifiers.Internal, Token.Type("IBooom"), Token.Identifiers.PropertyName("Property"), - Token.Puncuation.OpenBrace, + Token.Punctuation.OpenBrace, Token.Keywords.Get, - Token.Puncuation.Semicolon, + Token.Punctuation.Semicolon, Token.Keywords.Set, - Token.Puncuation.Semicolon, - 
Token.Puncuation.CloseBrace]); + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace]); }); it("auto-property", () => { @@ -133,12 +133,12 @@ public IBooom Property Token.Keywords.Modifiers.Public, Token.Type("IBooom"), Token.Identifiers.PropertyName("Property"), - Token.Puncuation.OpenBrace, + Token.Punctuation.OpenBrace, Token.Keywords.Get, - Token.Puncuation.Semicolon, + Token.Punctuation.Semicolon, Token.Keywords.Set, - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace]); + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace]); }); it("generic auto-property", () => { @@ -149,23 +149,23 @@ public IBooom Property tokens.should.deep.equal([ Token.Keywords.Modifiers.Public, Token.Type("Dictionary"), - Token.Puncuation.TypeParameters.Begin, + Token.Punctuation.TypeParameters.Begin, Token.Type("string"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Type("List"), - Token.Puncuation.TypeParameters.Begin, + Token.Punctuation.TypeParameters.Begin, Token.Type("T"), - Token.Puncuation.TypeParameters.End, - Token.Puncuation.OpenBracket, - Token.Puncuation.CloseBracket, - Token.Puncuation.TypeParameters.End, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.OpenBracket, + Token.Punctuation.CloseBracket, + Token.Punctuation.TypeParameters.End, Token.Identifiers.PropertyName("Property"), - Token.Puncuation.OpenBrace, + Token.Punctuation.OpenBrace, Token.Keywords.Get, - Token.Puncuation.Semicolon, + Token.Punctuation.Semicolon, Token.Keywords.Set, - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace]); + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace]); }); it("auto-property initializer", () => { @@ -176,37 +176,37 @@ public IBooom Property tokens.should.deep.equal([ Token.Keywords.Modifiers.Public, Token.Type("Dictionary"), - Token.Puncuation.TypeParameters.Begin, + Token.Punctuation.TypeParameters.Begin, Token.Type("string"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Type("List"), - 
Token.Puncuation.TypeParameters.Begin, + Token.Punctuation.TypeParameters.Begin, Token.Type("T"), - Token.Puncuation.TypeParameters.End, - Token.Puncuation.OpenBracket, - Token.Puncuation.CloseBracket, - Token.Puncuation.TypeParameters.End, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.OpenBracket, + Token.Punctuation.CloseBracket, + Token.Punctuation.TypeParameters.End, Token.Identifiers.PropertyName("Property"), - Token.Puncuation.OpenBrace, + Token.Punctuation.OpenBrace, Token.Keywords.Get, - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace, + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace, Token.Operators.Assignment, Token.Keywords.New, Token.Type("Dictionary"), - Token.Puncuation.TypeParameters.Begin, + Token.Punctuation.TypeParameters.Begin, Token.Type("string"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Type("List"), - Token.Puncuation.TypeParameters.Begin, + Token.Punctuation.TypeParameters.Begin, Token.Type("T"), - Token.Puncuation.TypeParameters.End, - Token.Puncuation.OpenBracket, - Token.Puncuation.CloseBracket, - Token.Puncuation.TypeParameters.End, - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon]); + Token.Punctuation.TypeParameters.End, + Token.Punctuation.OpenBracket, + Token.Punctuation.CloseBracket, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon]); }); it("expression body", () => { @@ -221,17 +221,17 @@ private bool prop2 => true;`); Token.Type("string"), Token.Identifiers.PropertyName("prop1"), Token.Operators.Arrow, - Token.Puncuation.String.Begin, + Token.Punctuation.String.Begin, Token.Literals.String("hello"), - Token.Puncuation.String.End, - Token.Puncuation.Semicolon, + Token.Punctuation.String.End, + Token.Punctuation.Semicolon, Token.Keywords.Modifiers.Private, Token.Type("bool"), Token.Identifiers.PropertyName("prop2"), Token.Operators.Arrow, 
Token.Literals.Boolean.True, - Token.Puncuation.Semicolon]); + Token.Punctuation.Semicolon]); }); it("explicitly-implemented interface member", () => { @@ -242,17 +242,17 @@ private bool prop2 => true;`); tokens.should.deep.equal([ Token.Type("string"), Token.Type("IFoo"), - Token.Puncuation.TypeParameters.Begin, + Token.Punctuation.TypeParameters.Begin, Token.Type("string"), - Token.Puncuation.TypeParameters.End, - Token.Puncuation.Accessor, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.Accessor, Token.Identifiers.PropertyName("Bar"), - Token.Puncuation.OpenBrace, + Token.Punctuation.OpenBrace, Token.Keywords.Get, - Token.Puncuation.Semicolon, + Token.Punctuation.Semicolon, Token.Keywords.Set, - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace]); + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace]); }); it("declaration in interface", () => { @@ -263,12 +263,12 @@ private bool prop2 => true;`); tokens.should.deep.equal([ Token.Type("string"), Token.Identifiers.PropertyName("Bar"), - Token.Puncuation.OpenBrace, + Token.Punctuation.OpenBrace, Token.Keywords.Get, - Token.Puncuation.Semicolon, + Token.Punctuation.Semicolon, Token.Keywords.Set, - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace]); + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace]); }); it("declaration in interface (read-only)", () => { @@ -279,10 +279,10 @@ private bool prop2 => true;`); tokens.should.deep.equal([ Token.Type("string"), Token.Identifiers.PropertyName("Bar"), - Token.Puncuation.OpenBrace, + Token.Punctuation.OpenBrace, Token.Keywords.Get, - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace]); + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace]); }); it("declaration in interface (write-only)", () => { @@ -293,10 +293,10 @@ private bool prop2 => true;`); tokens.should.deep.equal([ Token.Type("string"), Token.Identifiers.PropertyName("Bar"), - Token.Puncuation.OpenBrace, + Token.Punctuation.OpenBrace, 
Token.Keywords.Set, - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace]); + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/selection-statements.test.syntax.ts b/test/syntaxes/selection-statements.test.syntax.ts index 8397d39cca..981a81e484 100644 --- a/test/syntaxes/selection-statements.test.syntax.ts +++ b/test/syntaxes/selection-statements.test.syntax.ts @@ -16,13 +16,13 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.If, - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Literals.Boolean.True, - Token.Puncuation.CloseParen, + Token.Punctuation.CloseParen, Token.Identifiers.MethodName("Do"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon ]); }); @@ -32,15 +32,15 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.If, - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Literals.Boolean.True, - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, Token.Identifiers.MethodName("Do"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace ]); }); @@ -53,13 +53,13 @@ if (true) tokens.should.deep.equal([ Token.Keywords.If, - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Literals.Boolean.True, - Token.Puncuation.CloseParen, + Token.Punctuation.CloseParen, Token.Identifiers.MethodName("Do"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon ]); }); @@ -73,15 +73,15 @@ 
if (true) tokens.should.deep.equal([ Token.Keywords.If, - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Literals.Boolean.True, - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, Token.Identifiers.MethodName("Do"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace ]); }); @@ -96,18 +96,18 @@ else tokens.should.deep.equal([ Token.Keywords.If, - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Literals.Boolean.True, - Token.Puncuation.CloseParen, + Token.Punctuation.CloseParen, Token.Identifiers.MethodName("Do"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, Token.Keywords.Else, Token.Identifiers.MethodName("Dont"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon ]); }); @@ -125,22 +125,22 @@ else tokens.should.deep.equal([ Token.Keywords.If, - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Literals.Boolean.True, - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, Token.Identifiers.MethodName("Do"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace, Token.Keywords.Else, - Token.Puncuation.OpenBrace, + Token.Punctuation.OpenBrace, Token.Identifiers.MethodName("Dont"), - Token.Puncuation.OpenParen, - 
Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace ]); }); @@ -155,22 +155,22 @@ else if (false) tokens.should.deep.equal([ Token.Keywords.If, - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Literals.Boolean.True, - Token.Puncuation.CloseParen, + Token.Punctuation.CloseParen, Token.Identifiers.MethodName("Do"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, Token.Keywords.Else, Token.Keywords.If, - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Literals.Boolean.False, - Token.Puncuation.CloseParen, + Token.Punctuation.CloseParen, Token.Identifiers.MethodName("Dont"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon ]); }); @@ -188,26 +188,26 @@ else if (false) tokens.should.deep.equal([ Token.Keywords.If, - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Literals.Boolean.True, - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, Token.Identifiers.MethodName("Do"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace, Token.Keywords.Else, Token.Keywords.If, - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Literals.Boolean.False, - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, Token.Identifiers.MethodName("Dont"), - 
Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace ]); }); @@ -225,28 +225,28 @@ default: tokens.should.deep.equal([ Token.Keywords.Switch, - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Variables.ReadWrite("i"), - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, Token.Keywords.Case, Token.Literals.Numeric.Decimal("0"), - Token.Puncuation.Colon, + Token.Punctuation.Colon, Token.Keywords.Goto, Token.Keywords.Case, Token.Literals.Numeric.Decimal("1"), - Token.Puncuation.Semicolon, + Token.Punctuation.Semicolon, Token.Keywords.Case, Token.Literals.Numeric.Decimal("1"), - Token.Puncuation.Colon, + Token.Punctuation.Colon, Token.Keywords.Goto, Token.Keywords.Default, - Token.Puncuation.Semicolon, + Token.Punctuation.Semicolon, Token.Keywords.Default, - Token.Puncuation.Colon, + Token.Punctuation.Colon, Token.Keywords.Break, - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace ]); }); @@ -270,34 +270,34 @@ switch (i) { tokens.should.deep.equal([ Token.Keywords.Switch, - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Variables.ReadWrite("i"), - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, Token.Keywords.Case, Token.Literals.Numeric.Decimal("0"), - Token.Puncuation.Colon, - Token.Puncuation.OpenBrace, + Token.Punctuation.Colon, + Token.Punctuation.OpenBrace, Token.Keywords.Goto, Token.Keywords.Case, Token.Literals.Numeric.Decimal("1"), - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace, + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace, Token.Keywords.Case, Token.Literals.Numeric.Decimal("1"), - 
Token.Puncuation.Colon, - Token.Puncuation.OpenBrace, + Token.Punctuation.Colon, + Token.Punctuation.OpenBrace, Token.Keywords.Goto, Token.Keywords.Default, - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace, + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace, Token.Keywords.Default, - Token.Puncuation.Colon, - Token.Puncuation.OpenBrace, + Token.Punctuation.Colon, + Token.Punctuation.OpenBrace, Token.Keywords.Break, - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace, - Token.Puncuation.CloseBrace + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseBrace ]); }); }); diff --git a/test/syntaxes/string-literals.test.syntax.ts b/test/syntaxes/string-literals.test.syntax.ts index c1cea3d03c..95ec3d1749 100644 --- a/test/syntaxes/string-literals.test.syntax.ts +++ b/test/syntaxes/string-literals.test.syntax.ts @@ -19,10 +19,10 @@ describe("Grammar", () => { Token.Type("string"), Token.Identifiers.FieldName("test"), Token.Operators.Assignment, - Token.Puncuation.String.Begin, + Token.Punctuation.String.Begin, Token.Literals.String("hello world!"), - Token.Puncuation.String.End, - Token.Puncuation.Semicolon]); + Token.Punctuation.String.End, + Token.Punctuation.Semicolon]); }); it("escaped double-quote", () => { @@ -34,13 +34,13 @@ describe("Grammar", () => { Token.Type("string"), Token.Identifiers.FieldName("test"), Token.Operators.Assignment, - Token.Puncuation.String.Begin, + Token.Punctuation.String.Begin, Token.Literals.String("hello "), Token.Literals.CharacterEscape("\\\""), Token.Literals.String("world!"), Token.Literals.CharacterEscape("\\\""), - Token.Puncuation.String.End, - Token.Puncuation.Semicolon]); + Token.Punctuation.String.End, + Token.Punctuation.Semicolon]); }); it("line break before close quote", () => { @@ -54,14 +54,14 @@ world!";`); Token.Type("string"), Token.Identifiers.FieldName("test"), Token.Operators.Assignment, - Token.Puncuation.String.Begin, + Token.Punctuation.String.Begin, 
Token.Literals.String("hello"), // Note: Because the string ended prematurely, the rest of this line and the contents of the next are junk. Token.IllegalNewLine(" "), Token.Variables.ReadWrite("world"), Token.Operators.Logical.Not, - Token.Puncuation.String.Begin, + Token.Punctuation.String.Begin, Token.IllegalNewLine(";")]); }); @@ -74,10 +74,10 @@ world!";`); Token.Type("string"), Token.Identifiers.FieldName("test"), Token.Operators.Assignment, - Token.Puncuation.String.VerbatimBegin, + Token.Punctuation.String.VerbatimBegin, Token.Literals.String("hello world!"), - Token.Puncuation.String.End, - Token.Puncuation.Semicolon]); + Token.Punctuation.String.End, + Token.Punctuation.Semicolon]); }); it("escaped double-quote (verbatim)", () => { @@ -89,13 +89,13 @@ world!";`); Token.Type("string"), Token.Identifiers.FieldName("test"), Token.Operators.Assignment, - Token.Puncuation.String.VerbatimBegin, + Token.Punctuation.String.VerbatimBegin, Token.Literals.String("hello "), Token.Literals.CharacterEscape("\"\""), Token.Literals.String("world!"), Token.Literals.CharacterEscape("\"\""), - Token.Puncuation.String.End, - Token.Puncuation.Semicolon]); + Token.Punctuation.String.End, + Token.Punctuation.Semicolon]); }); it("line break before close quote (verbatim)", () => { @@ -109,11 +109,11 @@ world!";`); Token.Type("string"), Token.Identifiers.FieldName("test"), Token.Operators.Assignment, - Token.Puncuation.String.VerbatimBegin, + Token.Punctuation.String.VerbatimBegin, Token.Literals.String("hello "), Token.Literals.String("world!"), - Token.Puncuation.String.End, - Token.Puncuation.Semicolon]); + Token.Punctuation.String.End, + Token.Punctuation.Semicolon]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/structs.test.syntax.ts b/test/syntaxes/structs.test.syntax.ts index 8495ca7042..c7588130ab 100644 --- a/test/syntaxes/structs.test.syntax.ts +++ b/test/syntaxes/structs.test.syntax.ts @@ -18,8 +18,8 @@ describe("Grammar", () => { 
tokens.should.deep.equal([ Token.Keywords.Struct, Token.Identifiers.StructName("S"), - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace]); + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace]); }); it("struct interface implementation", () => { @@ -33,14 +33,14 @@ struct S : IFoo { } tokens.should.deep.equal([ Token.Keywords.Interface, Token.Identifiers.InterfaceName("IFoo"), - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, Token.Keywords.Struct, Token.Identifiers.StructName("S"), - Token.Puncuation.Colon, + Token.Punctuation.Colon, Token.Type("IFoo"), - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace]); + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace]); }); it("generic struct", () => { @@ -53,8 +53,8 @@ struct S { } tokens.should.deep.equal([ Token.Keywords.Struct, Token.Identifiers.StructName("S"), - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace]); + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace]); }); it("generic struct with constraints", () => { @@ -69,10 +69,10 @@ struct S where T1 : T2 { } Token.Identifiers.StructName("S"), Token.Keywords.Where, Token.Type("T1"), - Token.Puncuation.Colon, + Token.Punctuation.Colon, Token.Type("T2"), - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace]); + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace]); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/try-statements.test.syntax.ts b/test/syntaxes/try-statements.test.syntax.ts index ab7a6fa298..3889146a29 100644 --- a/test/syntaxes/try-statements.test.syntax.ts +++ b/test/syntaxes/try-statements.test.syntax.ts @@ -22,11 +22,11 @@ finally tokens.should.deep.equal([ Token.Keywords.Try, - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, Token.Keywords.Finally, - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace + 
Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace ]); }); @@ -42,11 +42,11 @@ catch tokens.should.deep.equal([ Token.Keywords.Try, - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, Token.Keywords.Catch, - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace ]); }); @@ -65,14 +65,14 @@ finally tokens.should.deep.equal([ Token.Keywords.Try, - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, Token.Keywords.Catch, - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, Token.Keywords.Finally, - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace ]); }); @@ -88,14 +88,14 @@ catch (Exception) tokens.should.deep.equal([ Token.Keywords.Try, - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, Token.Keywords.Catch, - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("Exception"), - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace ]); }); @@ -111,15 +111,15 @@ catch (Exception ex) tokens.should.deep.equal([ Token.Keywords.Try, - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, Token.Keywords.Catch, - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("Exception"), Token.Variables.Local("ex"), - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace ]); }); @@ -136,21 +136,21 @@ catch 
when (true) tokens.should.deep.equal([ Token.Keywords.Try, - Token.Puncuation.OpenBrace, + Token.Punctuation.OpenBrace, Token.Keywords.Throw, Token.Keywords.New, Token.Type("Exception"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace, Token.Keywords.Catch, Token.Keywords.When, - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Literals.Boolean.True, - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace ]); }); @@ -166,18 +166,18 @@ catch (Exception) when (true) tokens.should.deep.equal([ Token.Keywords.Try, - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, Token.Keywords.Catch, - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("Exception"), - Token.Puncuation.CloseParen, + Token.Punctuation.CloseParen, Token.Keywords.When, - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Literals.Boolean.True, - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, - Token.Puncuation.CloseBrace + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace ]); }); }); diff --git a/test/syntaxes/type-names.test.syntax.ts b/test/syntaxes/type-names.test.syntax.ts index c265969464..0afeb26845 100644 --- a/test/syntaxes/type-names.test.syntax.ts +++ b/test/syntaxes/type-names.test.syntax.ts @@ -18,7 +18,7 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Type("object"), Token.Identifiers.FieldName("x"), - Token.Puncuation.Semicolon]); + Token.Punctuation.Semicolon]); }); it("qualified name - System.Object", () => { @@ -28,10 +28,10 @@ describe("Grammar", () => { 
tokens.should.deep.equal([ Token.Type("System"), - Token.Puncuation.Accessor, + Token.Punctuation.Accessor, Token.Type("Object"), Token.Identifiers.FieldName("x"), - Token.Puncuation.Semicolon]); + Token.Punctuation.Semicolon]); }); it("globally-qualified name - global::System.Object", () => { @@ -41,12 +41,12 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Identifiers.AliasName("global"), - Token.Puncuation.ColonColon, + Token.Punctuation.ColonColon, Token.Type("System"), - Token.Puncuation.Accessor, + Token.Punctuation.Accessor, Token.Type("Object"), Token.Identifiers.FieldName("x"), - Token.Puncuation.Semicolon]); + Token.Punctuation.Semicolon]); }); it("tuple type - (int, int)", () => { @@ -55,13 +55,13 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("int"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Type("int"), - Token.Puncuation.CloseParen, + Token.Punctuation.CloseParen, Token.Identifiers.FieldName("x"), - Token.Puncuation.Semicolon]); + Token.Punctuation.Semicolon]); }); it("generic type - List", () => { @@ -71,11 +71,11 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Type("List"), - Token.Puncuation.TypeParameters.Begin, + Token.Punctuation.TypeParameters.Begin, Token.Type("int"), - Token.Puncuation.TypeParameters.End, + Token.Punctuation.TypeParameters.End, Token.Identifiers.FieldName("x"), - Token.Puncuation.Semicolon]); + Token.Punctuation.Semicolon]); }); it("generic type with tuple - List<(int, int)>", () => { @@ -85,15 +85,15 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Type("List"), - Token.Puncuation.TypeParameters.Begin, - Token.Puncuation.OpenParen, + Token.Punctuation.TypeParameters.Begin, + Token.Punctuation.OpenParen, Token.Type("int"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Type("int"), - Token.Puncuation.CloseParen, - 
Token.Puncuation.TypeParameters.End, + Token.Punctuation.CloseParen, + Token.Punctuation.TypeParameters.End, Token.Identifiers.FieldName("x"), - Token.Puncuation.Semicolon]); + Token.Punctuation.Semicolon]); }); it("generic type with multiple parameters - Dictionary", () => { @@ -103,13 +103,13 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Type("Dictionary"), - Token.Puncuation.TypeParameters.Begin, + Token.Punctuation.TypeParameters.Begin, Token.Type("int"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Type("int"), - Token.Puncuation.TypeParameters.End, + Token.Punctuation.TypeParameters.End, Token.Identifiers.FieldName("x"), - Token.Puncuation.Semicolon]); + Token.Punctuation.Semicolon]); }); it("qualified generic type - System.Collections.Generic.List", () => { @@ -119,17 +119,17 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Type("System"), - Token.Puncuation.Accessor, + Token.Punctuation.Accessor, Token.Type("Collections"), - Token.Puncuation.Accessor, + Token.Punctuation.Accessor, Token.Type("Generic"), - Token.Puncuation.Accessor, + Token.Punctuation.Accessor, Token.Type("List"), - Token.Puncuation.TypeParameters.Begin, + Token.Punctuation.TypeParameters.Begin, Token.Type("int"), - Token.Puncuation.TypeParameters.End, + Token.Punctuation.TypeParameters.End, Token.Identifiers.FieldName("x"), - Token.Puncuation.Semicolon]); + Token.Punctuation.Semicolon]); }); it("generic type with nested type - List.Enumerator", () => { @@ -139,13 +139,13 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Type("List"), - Token.Puncuation.TypeParameters.Begin, + Token.Punctuation.TypeParameters.Begin, Token.Type("int"), - Token.Puncuation.TypeParameters.End, - Token.Puncuation.Accessor, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.Accessor, Token.Type("Enumerator"), Token.Identifiers.FieldName("x"), - Token.Puncuation.Semicolon]); + Token.Punctuation.Semicolon]); }); }); }); \ No newline at end of 
file diff --git a/test/syntaxes/using-directives.test.syntax.ts b/test/syntaxes/using-directives.test.syntax.ts index 0d43f204c6..a87f7f4439 100644 --- a/test/syntaxes/using-directives.test.syntax.ts +++ b/test/syntaxes/using-directives.test.syntax.ts @@ -18,7 +18,7 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Using, Token.Identifiers.NamespaceName("System"), - Token.Puncuation.Semicolon]); + Token.Punctuation.Semicolon]); }); it("using static type", () => { @@ -30,9 +30,9 @@ describe("Grammar", () => { Token.Keywords.Using, Token.Keywords.Static, Token.Type("System"), - Token.Puncuation.Accessor, + Token.Punctuation.Accessor, Token.Type("Console"), - Token.Puncuation.Semicolon]); + Token.Punctuation.Semicolon]); }); it("namespace alias", () => { @@ -45,7 +45,7 @@ describe("Grammar", () => { Token.Identifiers.AliasName("S"), Token.Operators.Assignment, Token.Type("System"), - Token.Puncuation.Semicolon]); + Token.Punctuation.Semicolon]); }); it("type alias", () => { @@ -58,9 +58,9 @@ describe("Grammar", () => { Token.Identifiers.AliasName("C"), Token.Operators.Assignment, Token.Type("System"), - Token.Puncuation.Accessor, + Token.Punctuation.Accessor, Token.Type("Console"), - Token.Puncuation.Semicolon]); + Token.Punctuation.Semicolon]); }); it("type alias with generic type", () => { @@ -73,18 +73,18 @@ describe("Grammar", () => { Token.Identifiers.AliasName("IntList"), Token.Operators.Assignment, Token.Type("System"), - Token.Puncuation.Accessor, + Token.Punctuation.Accessor, Token.Type("Collections"), - Token.Puncuation.Accessor, + Token.Punctuation.Accessor, Token.Type("Generic"), - Token.Puncuation.Accessor, + Token.Punctuation.Accessor, Token.Type("List"), - Token.Puncuation.TypeParameters.Begin, + Token.Punctuation.TypeParameters.Begin, Token.Type("System"), - Token.Puncuation.Accessor, + Token.Punctuation.Accessor, Token.Type("Int32"), - Token.Puncuation.TypeParameters.End, - Token.Puncuation.Semicolon]); + 
Token.Punctuation.TypeParameters.End, + Token.Punctuation.Semicolon]); }); it("type alias with nested generic types", () => { @@ -97,31 +97,31 @@ describe("Grammar", () => { Token.Identifiers.AliasName("X"), Token.Operators.Assignment, Token.Type("System"), - Token.Puncuation.Accessor, + Token.Punctuation.Accessor, Token.Type("Collections"), - Token.Puncuation.Accessor, + Token.Punctuation.Accessor, Token.Type("Generic"), - Token.Puncuation.Accessor, + Token.Punctuation.Accessor, Token.Type("Dictionary"), - Token.Puncuation.TypeParameters.Begin, + Token.Punctuation.TypeParameters.Begin, Token.Type("System"), - Token.Puncuation.Accessor, + Token.Punctuation.Accessor, Token.Type("Int32"), - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Type("System"), - Token.Puncuation.Accessor, + Token.Punctuation.Accessor, Token.Type("Collections"), - Token.Puncuation.Accessor, + Token.Punctuation.Accessor, Token.Type("Generic"), - Token.Puncuation.Accessor, + Token.Punctuation.Accessor, Token.Type("List"), - Token.Puncuation.TypeParameters.Begin, + Token.Punctuation.TypeParameters.Begin, Token.Type("System"), - Token.Puncuation.Accessor, + Token.Punctuation.Accessor, Token.Type("String"), - Token.Puncuation.TypeParameters.End, - Token.Puncuation.TypeParameters.End, - Token.Puncuation.Semicolon]); + Token.Punctuation.TypeParameters.End, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.Semicolon]); }); it("type alias with nested generic types and comments interspersed", () => { @@ -138,31 +138,31 @@ describe("Grammar", () => { Token.Type("Dictionary"), Token.Comment.MultiLine.Start, Token.Comment.MultiLine.End, - Token.Puncuation.TypeParameters.Begin, + Token.Punctuation.TypeParameters.Begin, Token.Comment.MultiLine.Start, Token.Comment.MultiLine.End, Token.Type("int"), Token.Comment.MultiLine.Start, Token.Comment.MultiLine.End, - Token.Puncuation.Comma, + Token.Punctuation.Comma, Token.Comment.MultiLine.Start, Token.Comment.MultiLine.End, Token.Type("List"), 
Token.Comment.MultiLine.Start, Token.Comment.MultiLine.End, - Token.Puncuation.TypeParameters.Begin, + Token.Punctuation.TypeParameters.Begin, Token.Comment.MultiLine.Start, Token.Comment.MultiLine.End, Token.Type("string"), Token.Comment.MultiLine.Start, Token.Comment.MultiLine.End, - Token.Puncuation.TypeParameters.End, + Token.Punctuation.TypeParameters.End, Token.Comment.MultiLine.Start, Token.Comment.MultiLine.End, - Token.Puncuation.TypeParameters.End, + Token.Punctuation.TypeParameters.End, Token.Comment.MultiLine.Start, Token.Comment.MultiLine.End, - Token.Puncuation.Semicolon, + Token.Punctuation.Semicolon, Token.Comment.SingleLine.Start, Token.Comment.SingleLine.Text("end")]); }); diff --git a/test/syntaxes/using-statements.test.syntax.ts b/test/syntaxes/using-statements.test.syntax.ts index 6897666fc3..fb884c8140 100644 --- a/test/syntaxes/using-statements.test.syntax.ts +++ b/test/syntaxes/using-statements.test.syntax.ts @@ -16,16 +16,16 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Using, - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Keywords.New, Token.Type("object"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.CloseParen, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.CloseParen, Token.Identifiers.MethodName("Do"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon ]); }); @@ -35,18 +35,18 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Using, - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Keywords.New, Token.Type("object"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.CloseParen, + 
Token.Punctuation.OpenBrace, Token.Identifiers.MethodName("Do"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace ]); }); @@ -58,16 +58,16 @@ using (new object()) tokens.should.deep.equal([ Token.Keywords.Using, - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Keywords.New, Token.Type("object"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.CloseParen, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.CloseParen, Token.Identifiers.MethodName("Do"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon ]); }); @@ -81,18 +81,18 @@ using (new object()) tokens.should.deep.equal([ Token.Keywords.Using, - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Keywords.New, Token.Type("object"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, Token.Identifiers.MethodName("Do"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace ]); }); @@ -104,19 +104,19 @@ using (var o = new object()) tokens.should.deep.equal([ Token.Keywords.Using, - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("var"), Token.Variables.Local("o"), Token.Operators.Assignment, Token.Keywords.New, Token.Type("object"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - 
Token.Puncuation.CloseParen, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.CloseParen, Token.Identifiers.MethodName("Do"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon ]); }); @@ -130,21 +130,21 @@ using (var o = new object()) tokens.should.deep.equal([ Token.Keywords.Using, - Token.Puncuation.OpenParen, + Token.Punctuation.OpenParen, Token.Type("var"), Token.Variables.Local("o"), Token.Operators.Assignment, Token.Keywords.New, Token.Type("object"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.CloseParen, - Token.Puncuation.OpenBrace, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, Token.Identifiers.MethodName("Do"), - Token.Puncuation.OpenParen, - Token.Puncuation.CloseParen, - Token.Puncuation.Semicolon, - Token.Puncuation.CloseBrace + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace ]); }); }); diff --git a/test/syntaxes/utils/tokenize.ts b/test/syntaxes/utils/tokenize.ts index 6ec4789e9e..38ac91a733 100644 --- a/test/syntaxes/utils/tokenize.ts +++ b/test/syntaxes/utils/tokenize.ts @@ -288,7 +288,7 @@ export namespace Token { } } - export namespace Puncuation { + export namespace Punctuation { export const Accessor = createToken('.', 'punctuation.accessor.cs'); export const CloseBrace = createToken('}', 'punctuation.curlybrace.close.cs'); export const CloseBracket = createToken(']', 'punctuation.squarebracket.close.cs'); diff --git a/test/syntaxes/yield-statements.test.syntax.ts b/test/syntaxes/yield-statements.test.syntax.ts index bbc5dd1acf..6de730eec9 100644 --- a/test/syntaxes/yield-statements.test.syntax.ts +++ b/test/syntaxes/yield-statements.test.syntax.ts @@ -18,7 +18,7 @@ describe("Grammar", () => 
{ Token.Keywords.Yield, Token.Keywords.Return, Token.Literals.Numeric.Decimal("42"), - Token.Puncuation.Semicolon + Token.Punctuation.Semicolon ]); }); @@ -29,7 +29,7 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Yield, Token.Keywords.Break, - Token.Puncuation.Semicolon + Token.Punctuation.Semicolon ]); }); }); From 60271d1ea8bc43f115f6c951b6f17c81a9421019 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Fri, 6 Jan 2017 18:49:01 -0800 Subject: [PATCH 082/192] Add regression test for issue #802 --- test/syntaxes/methods.test.syntax.ts | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/test/syntaxes/methods.test.syntax.ts b/test/syntaxes/methods.test.syntax.ts index db9dcf9775..7d1495f71e 100644 --- a/test/syntaxes/methods.test.syntax.ts +++ b/test/syntaxes/methods.test.syntax.ts @@ -164,5 +164,30 @@ int Add(int x, int y) Token.Type("TResult"), Token.Punctuation.Semicolon]); }); + + it("commented parameters are highlighted properly (issue #802)", () => { + + const input = Input.InClass(`public void methodWithParametersCommented(int p1, /*int p2*/, int p3) {}`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Public, + Token.Type("void"), + Token.Identifiers.MethodName("methodWithParametersCommented"), + Token.Punctuation.OpenParen, + Token.Type("int"), + Token.Variables.Parameter("p1"), + Token.Punctuation.Comma, + Token.Comment.MultiLine.Start, + Token.Comment.MultiLine.Text("int p2"), + Token.Comment.MultiLine.End, + Token.Punctuation.Comma, + Token.Type("int"), + Token.Variables.Parameter("p3"), + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace + ]); + }); }); }); \ No newline at end of file From 7be2f673907a5ce589c7031a5fe2b3ec9174cc6e Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Fri, 6 Jan 2017 18:51:02 -0800 Subject: [PATCH 083/192] Add regression tests for issue #816 --- .../iteration-statements.test.syntax.ts 
| 22 +++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/test/syntaxes/iteration-statements.test.syntax.ts b/test/syntaxes/iteration-statements.test.syntax.ts index 9e8a418908..2e16e9848b 100644 --- a/test/syntaxes/iteration-statements.test.syntax.ts +++ b/test/syntaxes/iteration-statements.test.syntax.ts @@ -145,5 +145,27 @@ for (int i = 0; i < 42; i++) Token.Punctuation.CloseBrace, ]); }); + + it("foreach loop with var (issue #816)", () => { + + const input = Input.InMethod(` +foreach (var s in myList) +{ + +}`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.ForEach, + Token.Punctuation.OpenParen, + Token.Type("var"), + Token.Variables.Local("s"), + Token.Keywords.In, + Token.Variables.ReadWrite("myList"), + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + ]); + }); }); }); \ No newline at end of file From 74ee3108bee6a72aedb9c4f3217aa52f4f5bc184 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Fri, 6 Jan 2017 18:56:12 -0800 Subject: [PATCH 084/192] Add regression test for issue #830 --- test/syntaxes/methods.test.syntax.ts | 39 ++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) diff --git a/test/syntaxes/methods.test.syntax.ts b/test/syntaxes/methods.test.syntax.ts index 7d1495f71e..4f85b5b348 100644 --- a/test/syntaxes/methods.test.syntax.ts +++ b/test/syntaxes/methods.test.syntax.ts @@ -189,5 +189,44 @@ int Add(int x, int y) Token.Punctuation.CloseBrace ]); }); + + it("return type is highlighted properly in interface (issue #830)", () => { + + const input = ` +public interface test +{ + Task test1(List blah); + Task test(List blah); +}`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Public, + Token.Keywords.Interface, + Token.Identifiers.InterfaceName("test"), + Token.Punctuation.OpenBrace, + Token.Type("Task"), + Token.Identifiers.MethodName("test1"), + Token.Punctuation.OpenParen, + Token.Type("List"), + 
Token.Punctuation.TypeParameters.Begin, + Token.Type("string"), + Token.Punctuation.TypeParameters.End, + Token.Variables.Parameter("blah"), + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + Token.Type("Task"), + Token.Identifiers.MethodName("test"), + Token.Punctuation.OpenParen, + Token.Type("List"), + Token.Punctuation.TypeParameters.Begin, + Token.Type("T"), + Token.Punctuation.TypeParameters.End, + Token.Variables.Parameter("blah"), + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace + ]); + }); }); }); \ No newline at end of file From 487eb62c0efeee40eac644677beb23e5bbd0019d Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Fri, 6 Jan 2017 19:17:43 -0800 Subject: [PATCH 085/192] Fix #829 and add regression test --- syntaxes/csharp.tmLanguage.yml | 9 +- test/syntaxes/methods.test.syntax.ts | 159 +++++++++++++++++++++++++++ 2 files changed, 166 insertions(+), 2 deletions(-) diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index f5da446a24..38d6e5ff25 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -41,6 +41,7 @@ repository: - include: '#enum-declaration' - include: '#interface-declaration' - include: '#struct-declaration' + - include: '#attribute-section' - include: '#punctuation-semicolon' class-members: @@ -56,6 +57,7 @@ repository: - include: '#destructor-declaration' - include: '#operator-declaration' - include: '#conversion-operator-declaration' + - include: '#attribute-section' - include: '#punctuation-semicolon' struct-members: @@ -71,6 +73,7 @@ repository: - include: '#destructor-declaration' - include: '#operator-declaration' - include: '#conversion-operator-declaration' + - include: '#attribute-section' - include: '#punctuation-semicolon' interface-members: @@ -79,6 +82,7 @@ repository: - include: '#property-declaration' - include: '#indexer-declaration' - include: '#method-declaration' + - include: '#attribute-section' - 
include: '#punctuation-semicolon' statement: @@ -117,6 +121,7 @@ repository: - include: '#object-creation-expression' - include: '#invocation-expression' - include: '#element-access-expression' + - include: '#member-access-expression' - include: '#parenthesized-expression' - include: '#identifier' @@ -1440,9 +1445,9 @@ repository: member-access-expression: patterns: - - match: (\.)\s*([_$[:alpha:]][_$[:alnum:]]*)(?=\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*) + - match: (\.)\s*([_$[:alpha:]][_$[:alnum:]]*)(?=(\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)|\)) captures: - '1': { name: puncuation.accessor.cs } + '1': { name: punctuation.accessor.cs } '2': { name: variable.other.object.property.cs } - match: |- (?x) diff --git a/test/syntaxes/methods.test.syntax.ts b/test/syntaxes/methods.test.syntax.ts index 4f85b5b348..6699408c6e 100644 --- a/test/syntaxes/methods.test.syntax.ts +++ b/test/syntaxes/methods.test.syntax.ts @@ -228,5 +228,164 @@ public interface test Token.Punctuation.CloseBrace ]); }); + + it("attributes are highlighted properly (issue #829)", () => { + + const input = ` +namespace Test +{ + public class TestClass + { + [HttpPut] + [Route("/meetups/{id}/users-going")] + public void AddToGoingUsers(Guid id, string user) => _commandSender.Send(new MarkUserAsGoing(id, user.User)); + + [HttpPut] + [Route("/meetups/{id}/users-not-going")] + public void AddToNotGoingUsers(Guid id, string user) => _commandSender.Send(new MarkUserAsNotGoing(id, user.User)); + + [HttpPut] + [Route("/meetups/{id}/users-not-sure-if-going")] + public void AddToNotSureIfGoingUsers(Guid id, string user) => _commandSender.Send(new MarkUserAsNotSureIfGoing(id, user.User)); + } +}`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Namespace, + Token.Identifiers.NamespaceName("Test"), + Token.Punctuation.OpenBrace, + Token.Keywords.Modifiers.Public, + Token.Keywords.Class, + Token.Identifiers.ClassName("TestClass"), + Token.Punctuation.OpenBrace, + + // [HttpPut] + // 
[Route("/meetups/{id}/users-going")] + // public void AddToGoingUsers(Guid id, string user) => _commandSender.Send(new MarkUserAsGoing(id, user.User)); + Token.Punctuation.OpenBracket, + Token.Type("HttpPut"), + Token.Punctuation.CloseBracket, + Token.Punctuation.OpenBracket, + Token.Type("Route"), + Token.Punctuation.OpenParen, + Token.Punctuation.String.Begin, + Token.Literals.String("/meetups/{id}/users-going"), + Token.Punctuation.String.End, + Token.Punctuation.CloseParen, + Token.Punctuation.CloseBracket, + Token.Keywords.Modifiers.Public, + Token.Type("void"), + Token.Identifiers.MethodName("AddToGoingUsers"), + Token.Punctuation.OpenParen, + Token.Type("Guid"), + Token.Variables.Parameter("id"), + Token.Punctuation.Comma, + Token.Type("string"), + Token.Variables.Parameter("user"), + Token.Punctuation.CloseParen, + Token.Operators.Arrow, + Token.Variables.Object("_commandSender"), + Token.Punctuation.Accessor, + Token.Identifiers.MethodName("Send"), + Token.Punctuation.OpenParen, + Token.Keywords.New, + Token.Type("MarkUserAsGoing"), + Token.Punctuation.OpenParen, + Token.Variables.ReadWrite("id"), + Token.Punctuation.Comma, + Token.Variables.Object("user"), + Token.Punctuation.Accessor, + Token.Variables.Property("User"), + Token.Punctuation.CloseParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + + // [HttpPut] + // [Route("/meetups/{id}/users-not-going")] + // public void AddToNotGoingUsers(Guid id, string user) => _commandSender.Send(new MarkUserAsNotGoing(id, user.User)); + Token.Punctuation.OpenBracket, + Token.Type("HttpPut"), + Token.Punctuation.CloseBracket, + Token.Punctuation.OpenBracket, + Token.Type("Route"), + Token.Punctuation.OpenParen, + Token.Punctuation.String.Begin, + Token.Literals.String("/meetups/{id}/users-not-going"), + Token.Punctuation.String.End, + Token.Punctuation.CloseParen, + Token.Punctuation.CloseBracket, + Token.Keywords.Modifiers.Public, + Token.Type("void"), + 
Token.Identifiers.MethodName("AddToNotGoingUsers"), + Token.Punctuation.OpenParen, + Token.Type("Guid"), + Token.Variables.Parameter("id"), + Token.Punctuation.Comma, + Token.Type("string"), + Token.Variables.Parameter("user"), + Token.Punctuation.CloseParen, + Token.Operators.Arrow, + Token.Variables.Object("_commandSender"), + Token.Punctuation.Accessor, + Token.Identifiers.MethodName("Send"), + Token.Punctuation.OpenParen, + Token.Keywords.New, + Token.Type("MarkUserAsNotGoing"), + Token.Punctuation.OpenParen, + Token.Variables.ReadWrite("id"), + Token.Punctuation.Comma, + Token.Variables.Object("user"), + Token.Punctuation.Accessor, + Token.Variables.Property("User"), + Token.Punctuation.CloseParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + + // [HttpPut] + // [Route("/meetups/{id}/users-not-sure-if-going")] + // public void AddToNotSureIfGoingUsers(Guid id, string user) => _commandSender.Send(new MarkUserAsNotSureIfGoing(id, user.User)); + Token.Punctuation.OpenBracket, + Token.Type("HttpPut"), + Token.Punctuation.CloseBracket, + Token.Punctuation.OpenBracket, + Token.Type("Route"), + Token.Punctuation.OpenParen, + Token.Punctuation.String.Begin, + Token.Literals.String("/meetups/{id}/users-not-sure-if-going"), + Token.Punctuation.String.End, + Token.Punctuation.CloseParen, + Token.Punctuation.CloseBracket, + Token.Keywords.Modifiers.Public, + Token.Type("void"), + Token.Identifiers.MethodName("AddToNotSureIfGoingUsers"), + Token.Punctuation.OpenParen, + Token.Type("Guid"), + Token.Variables.Parameter("id"), + Token.Punctuation.Comma, + Token.Type("string"), + Token.Variables.Parameter("user"), + Token.Punctuation.CloseParen, + Token.Operators.Arrow, + Token.Variables.Object("_commandSender"), + Token.Punctuation.Accessor, + Token.Identifiers.MethodName("Send"), + Token.Punctuation.OpenParen, + Token.Keywords.New, + Token.Type("MarkUserAsNotSureIfGoing"), + Token.Punctuation.OpenParen, + Token.Variables.ReadWrite("id"), + 
Token.Punctuation.Comma, + Token.Variables.Object("user"), + Token.Punctuation.Accessor, + Token.Variables.Property("User"), + Token.Punctuation.CloseParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseBrace + ]); + }); }); }); \ No newline at end of file From d161564d4308246bb735defc066c9f089c08f63c Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Fri, 6 Jan 2017 20:19:12 -0800 Subject: [PATCH 086/192] Fix #861 and add regression test --- syntaxes/csharp.tmLanguage.yml | 164 ++++++---------------- test/syntaxes/constructors.test.syntax.ts | 97 +++++++++++++ 2 files changed, 142 insertions(+), 119 deletions(-) diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index 38d6e5ff25..2665213a92 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -36,6 +36,7 @@ repository: type-declarations: patterns: + - include: '#storage-modifier' - include: '#class-declaration' - include: '#delegate-declaration' - include: '#enum-declaration' @@ -46,33 +47,35 @@ repository: class-members: patterns: + - include: '#storage-modifier' - include: '#type-declarations' - include: '#event-declaration' - include: '#property-declaration' - include: '#indexer-declaration' - include: '#field-declaration' - include: '#variable-initializer' - - include: '#method-declaration' - include: '#constructor-declaration' - include: '#destructor-declaration' - include: '#operator-declaration' - include: '#conversion-operator-declaration' + - include: '#method-declaration' - include: '#attribute-section' - include: '#punctuation-semicolon' struct-members: patterns: + - include: '#storage-modifier' - include: '#type-declarations' - include: '#event-declaration' - include: '#property-declaration' - include: '#indexer-declaration' - include: '#field-declaration' - include: '#variable-initializer' - - include: '#method-declaration' - include: '#constructor-declaration' - 
include: '#destructor-declaration' - include: '#operator-declaration' - include: '#conversion-operator-declaration' + - include: '#method-declaration' - include: '#attribute-section' - include: '#punctuation-semicolon' @@ -222,13 +225,15 @@ repository: - include: '#using-directive' - include: '#punctuation-semicolon' + storage-modifier: + name: 'storage.modifier.cs' + match: (?(?:\b(?:new|public|protected|internal|private)\b\s+)*) (?(?:\b(?:delegate)\b))\s+ (?(?: (?:(?[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? @@ -271,17 +275,12 @@ repository: )\s* (?=\() beginCaptures: - '1': - patterns: - - match: \b(new|public|protected|internal|private)\b - captures: - '1': { name: storage.modifier.cs } - '2': { name: keyword.other.delegate.cs } - '3': + '1': { name: keyword.other.delegate.cs } + '2': patterns: - include: '#type' - # '4': ? is a sub-expression. It's final value is not considered. - '5': { name: entity.name.type.delegate.cs } + # '3': ? is a sub-expression. It's final value is not considered. 
+ '4': { name: entity.name.type.delegate.cs } end: (?=;) patterns: - include: '#comment' @@ -289,12 +288,10 @@ repository: - include: '#generic-constraints' enum-declaration: - begin: (?=(?:((new|public|protected|internal|private)\s+)*)(?:enum)\s+) + begin: (?=enum\s+) end: (?<=\}) patterns: - include: '#comment' - - name: storage.modifier.cs - match: \b(new|public|protected|internal|private)\b - begin: (?=enum) end: (?=\{) patterns: @@ -328,12 +325,10 @@ repository: - include: '#variable-initializer' interface-declaration: - begin: (?=(?:((new|public|protected|internal|private|partial)\s+)*)(?:interface)\s+) + begin: (?=interface\s+) end: (?<=\}) patterns: - include: '#comment' - - name: storage.modifier.cs - match: \b(new|public|protected|internal|private|partial)\b - begin: (?=interface) end: (?=\{) patterns: @@ -360,12 +355,10 @@ repository: - include: '#interface-members' struct-declaration: - begin: (?=(?:((new|public|protected|internal|private|partial)\s+)*)(?:struct)\s+) + begin: (?=struct\s+) end: (?<=\}) patterns: - include: '#comment' - - name: storage.modifier.cs - match: \b(new|public|protected|internal|private|partial)\b - begin: (?=struct) end: (?=\{) patterns: @@ -424,7 +417,6 @@ repository: field-declaration: begin: |- (?x) - (?(?:\b(?:new|public|protected|internal|private|static|readonly|volatile|const)\b\s+)*)\s* (?(?: (?:(?[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? 
(?: @@ -441,14 +433,9 @@ repository: (?!=>|==)(?=,|;|=) beginCaptures: '1': - patterns: - - match: \b(new|public|protected|internal|private|static|readonly|volatile|const)\b - captures: - '1': { name: storage.modifier.cs } - '2': patterns: - include: '#type' - '3': { name: entity.name.variable.field.cs } + '2': { name: entity.name.variable.field.cs } end: (?=;) patterns: - name: entity.name.variable.field.cs @@ -460,8 +447,7 @@ repository: property-declaration: begin: |- (?x) - (?!.*\b(?:class|interface|struct|enum|event)\b) - (?(?:\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\b\s*)*)\s* + (?!.*\b(?:class|interface|struct|enum|event)\b)\s* (? (?(?: (?:(?[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? @@ -481,20 +467,15 @@ repository: (?=\{|=>|$) beginCaptures: '1': - patterns: - - match: \b(new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\b - captures: - '1': { name: storage.modifier.cs } - '2': patterns: - include: '#type' - # '3': ? is a sub-expression. It's final value is not considered. - # '4': ? is a sub-expression. It's final value is not considered. - '5': + # '2': ? is a sub-expression. It's final value is not considered. + # '3': ? is a sub-expression. It's final value is not considered. + '4': patterns: - include: '#type' - include: '#punctuation-accessor' - '6': { name: entity.name.variable.property.cs } + '5': { name: entity.name.variable.property.cs } end: (?=\}|;) patterns: - include: '#comment' @@ -505,7 +486,6 @@ repository: indexer-declaration: begin: |- (?x) - (?(?:\b(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\b\s*)*)\s* (? (?(?: (?:(?[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? @@ -525,20 +505,15 @@ repository: (?=\[) beginCaptures: '1': - patterns: - - match: \b(new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\b - captures: - '1': { name: storage.modifier.cs } - '2': patterns: - include: '#type' - # '3': ? 
is a sub-expression. It's final value is not considered. - # '4': ? is a sub-expression. It's final value is not considered. - '5': + # '2': ? is a sub-expression. It's final value is not considered. + # '3': ? is a sub-expression. It's final value is not considered. + '4': patterns: - include: '#type' - include: '#punctuation-accessor' - '6': + '5': name: keyword.other.this.cs end: (?=\}|;) patterns: @@ -551,7 +526,6 @@ repository: event-declaration: begin: |- (?x) - (?(?:\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\b\s*)*)\s* \b(?event)\b\s* (? (?(?: @@ -571,22 +545,17 @@ repository: (?\g(?:\s*,\s*\g)*)\s* (?=\{|;|$) beginCaptures: - '1': - patterns: - - match: \b(new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\b - captures: - '1': { name: storage.modifier.cs } - '2': { name: keyword.other.event.cs } - '3': + '1': { name: keyword.other.event.cs } + '2': patterns: - include: '#type' - # '4': ? is a sub-expression. It's final value is not considered. - # '5': ? is a sub-expression. It's final value is not considered. - '6': + # '3': ? is a sub-expression. It's final value is not considered. + # '4': ? is a sub-expression. It's final value is not considered. + '5': patterns: - include: '#type' - include: '#punctuation-accessor' - '7': + '6': patterns: - name: entity.name.variable.event.cs match: '[_$[:alpha:]][_$[:alnum:]]*' @@ -632,7 +601,6 @@ repository: method-declaration: begin: |- (?x) - (?(?:\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\b\s*)*)\s* (? (?(?: (?:(?[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? @@ -652,21 +620,15 @@ repository: (?=\() beginCaptures: '1': - patterns: - - match: \b(new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\b - captures: - '1': { name: storage.modifier.cs } - '2': patterns: - include: '#type' - # '3': ? is a sub-expression. 
It's final value is not considered. - # '4': ? is a sub-expression. It's final value is not considered. - '5': + # '2': ? is a sub-expression. It's final value is not considered. + # '3': ? is a sub-expression. It's final value is not considered. + '4': patterns: - include: '#type' - include: '#punctuation-accessor' - '6': - name: entity.name.function.cs + '5': { name: entity.name.function.cs } end: (?=\}|;) patterns: - include: '#comment' @@ -676,35 +638,11 @@ repository: - include: '#block' constructor-declaration: - begin: |- - (?x) - (?= - # We're a extra careful here to avoid matching field declarations of the shape 'private (int i) x' - (?: - (?(?:(?:public|protected|internal|private|extern|static)\s+)+)\s* - (?[_$[:alpha:]][_$[:alnum:]]*)| - (?:\g) - )\s* - (?:\() - ) + begin: ([_$[:alpha:]][_$[:alnum:]]*)\s*(?=\() + beginCaptures: + '1': { name: entity.name.function.cs } end: (?=\}|;) patterns: - - match: |- - (?x) - (?(?:(?:public|protected|internal|private|extern|static)\s+)+)\s* - (?[_$[:alpha:]][_$[:alnum:]]*)\s* - (?=\() - captures: - '1': - patterns: - - match: \b(public|protected|internal|private|extern|static)\b - captures: - '1': { name: storage.modifier.cs } - '2': - name: entity.name.function.cs - - match: ([_$[:alpha:]][_$[:alnum:]]*)\s*(?=\() - captures: - '1': { name: entity.name.function.cs } - include: '#comment' - include: '#parenthesized-parameter-list' - include: '#expression-body' @@ -736,7 +674,6 @@ repository: operator-declaration: begin: |- (?x) - (?(?:(?:public|static|extern)\s+)*)\s* (?(?: (?:(?[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? (?: @@ -754,16 +691,11 @@ repository: (?=\() beginCaptures: '1': - patterns: - - match: \b(public|static|extern)\b - captures: - '1': { name: storage.modifier.cs } - '2': patterns: - include: '#type' - # '3': ? is a sub-expression. It's final value is not considered. - '4': { name: keyword.other.operator.cs } - '5': { name: entity.name.function.cs } + # '2': ? is a sub-expression. 
It's final value is not considered. + '3': { name: keyword.other.operator.cs } + '4': { name: entity.name.function.cs } end: (?=\}|;) patterns: - include: '#comment' @@ -774,7 +706,6 @@ repository: conversion-operator-declaration: begin: |- (?x) - (?(?:(?:public|static|extern)\s+)*)\s* (?(?:\b(?:explicit|implicit)))\s* (?(?:\b(?:operator)))\s* (?(?: @@ -792,11 +723,6 @@ repository: (?=\() beginCaptures: '1': - patterns: - - match: \b(public|static|extern)\b - captures: - '1': { name: storage.modifier.cs } - '2': patterns: - match: \b(explicit)\b captures: @@ -804,8 +730,8 @@ repository: - match: \b(implicit)\b captures: '1': { name: keyword.other.implicit.cs } - '3': { name: keyword.other.operator.cs } - '4': + '2': { name: keyword.other.operator.cs } + '3': patterns: - include: '#type' end: (?=\}|;) diff --git a/test/syntaxes/constructors.test.syntax.ts b/test/syntaxes/constructors.test.syntax.ts index 9a3d89b37b..c21de143a3 100644 --- a/test/syntaxes/constructors.test.syntax.ts +++ b/test/syntaxes/constructors.test.syntax.ts @@ -23,6 +23,53 @@ describe("Grammar", () => { Token.Punctuation.CloseBrace]); }); + it("public instance constructor with no parameters", () => { + + const input = Input.InClass(`public TestClass() { }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Public, + Token.Identifiers.MethodName("TestClass"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace]); + }); + + it("public instance constructor with one parameter", () => { + + const input = Input.InClass(`public TestClass(int x) { }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Public, + Token.Identifiers.MethodName("TestClass"), + Token.Punctuation.OpenParen, + Token.Type("int"), + Token.Variables.Parameter("x"), + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace]); + }); + + 
it("public instance constructor with one ref parameter", () => { + + const input = Input.InClass(`public TestClass(ref int x) { }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Public, + Token.Identifiers.MethodName("TestClass"), + Token.Punctuation.OpenParen, + Token.Keywords.Modifiers.Ref, + Token.Type("int"), + Token.Variables.Parameter("x"), + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace]); + }); + it("instance constructor with two parameters", () => { const input = Input.InClass(` @@ -96,6 +143,25 @@ TestClass(int x, int y) Token.Punctuation.CloseBrace]); }); + it("public instance constructor with 'this' initializer", () => { + + const input = Input.InClass(`public TestClass() : this(42) { }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Public, + Token.Identifiers.MethodName("TestClass"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Colon, + Token.Keywords.This, + Token.Punctuation.OpenParen, + Token.Literals.Numeric.Decimal("42"), + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace]); + }); + it("instance constructor with 'this' initializer with ref parameter", () => { const input = Input.InClass(`TestClass(int x) : this(ref x) { }`); @@ -156,5 +222,36 @@ TestClass(int x, int y) Token.Punctuation.OpenBrace, Token.Punctuation.CloseBrace]); }); + + it("Open multiline comment in front of parameter highlights properly (issue #861)", () => { + + const input = Input.InClass(` +internal WaitHandle(Task self, TT.Task /*task) +{ + this.task = task; + this.selff = self; +} +`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Internal, + Token.Identifiers.MethodName("WaitHandle"), + Token.Punctuation.OpenParen, + Token.Type("Task"), + Token.Variables.Parameter("self"), + Token.Punctuation.Comma, + 
Token.Type("TT"), + Token.Punctuation.Accessor, + Token.Type("Task"), + Token.Comment.MultiLine.Start, + Token.Comment.MultiLine.Text("task)"), + Token.Comment.MultiLine.Text("{"), + Token.Comment.MultiLine.Text(" this.task = task;"), + Token.Comment.MultiLine.Text(" this.selff = self;"), + Token.Comment.MultiLine.Text("}"), + Token.Comment.MultiLine.Text(""), + ]); + }); }); }); \ No newline at end of file From 1ab9c08acfb0f703ff079efaa65c4206f0f0fee0 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Fri, 6 Jan 2017 22:57:00 -0800 Subject: [PATCH 087/192] Fix #1078 and add regression tests --- syntaxes/csharp.tmLanguage.yml | 21 ++++- test/syntaxes/string-literals.test.syntax.ts | 81 ++++++++++++++++++++ test/syntaxes/utils/tokenize.ts | 1 + 3 files changed, 102 insertions(+), 1 deletion(-) diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index 2665213a92..7406461a2a 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -116,9 +116,11 @@ repository: expression: patterns: - include: '#checked-unchecked-expression' + - include: '#typeof-expression' - include: '#interpolated-string' - include: '#verbatim-interpolated-string' - include: '#literal' + - include: '#this-or-base-expression' - include: '#expression-operators' - include: '#cast-expression' - include: '#object-creation-expression' @@ -1174,6 +1176,17 @@ repository: patterns: - include: '#expression' + typeof-expression: + begin: (? 
{ + + const input = Input.InMethod( +`configContent = rgx.Replace(configContent, $"name{suffix}\\""); +File.WriteAllText(_testConfigFile, configContent);`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Variables.ReadWrite("configContent"), + Token.Operators.Assignment, + Token.Variables.Object('rgx'), + Token.Punctuation.Accessor, + Token.Identifiers.MethodName("Replace"), + Token.Punctuation.OpenParen, + Token.Variables.ReadWrite("configContent"), + Token.Punctuation.Comma, + Token.Punctuation.InterpolatedString.Begin, + Token.Literals.String("name"), + Token.Punctuation.Interpolation.Begin, + Token.Variables.ReadWrite("suffix"), + Token.Punctuation.Interpolation.End, + Token.Literals.CharacterEscape("\\\""), + Token.Punctuation.String.End, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + Token.Variables.Object("File"), + Token.Punctuation.Accessor, + Token.Identifiers.MethodName("WriteAllText"), + Token.Punctuation.OpenParen, + Token.Variables.ReadWrite("_testConfigFile"), + Token.Punctuation.Comma, + Token.Variables.ReadWrite("configContent"), + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("highlight escaped double-quote properly (issue #1078 - repro 2)", () => { + + const input = Input.InMethod( +`throw new InvalidCastException( + $"The value \\"{this.Value} is of the type \\"{this.Type}\\". 
You asked for \\"{typeof(T)}\\".");`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Throw, + Token.Keywords.New, + Token.Type("InvalidCastException"), + Token.Punctuation.OpenParen, + Token.Punctuation.InterpolatedString.Begin, + Token.Literals.String("The value "), + Token.Literals.CharacterEscape("\\\""), + Token.Punctuation.Interpolation.Begin, + Token.Keywords.This, + Token.Punctuation.Accessor, + Token.Variables.Property("Value"), + Token.Punctuation.Interpolation.End, + Token.Literals.String(" is of the type "), + Token.Literals.CharacterEscape("\\\""), + Token.Punctuation.Interpolation.Begin, + Token.Keywords.This, + Token.Punctuation.Accessor, + Token.Variables.Property("Type"), + Token.Punctuation.Interpolation.End, + Token.Literals.CharacterEscape("\\\""), + Token.Literals.String(". You asked for "), + Token.Literals.CharacterEscape("\\\""), + Token.Punctuation.Interpolation.Begin, + Token.Keywords.TypeOf, + Token.Punctuation.OpenParen, + Token.Type("T"), + Token.Punctuation.CloseParen, + Token.Punctuation.Interpolation.End, + Token.Literals.CharacterEscape("\\\""), + Token.Literals.String("."), + Token.Punctuation.InterpolatedString.End, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); }); }); \ No newline at end of file diff --git a/test/syntaxes/utils/tokenize.ts b/test/syntaxes/utils/tokenize.ts index 38ac91a733..f76704e4a5 100644 --- a/test/syntaxes/utils/tokenize.ts +++ b/test/syntaxes/utils/tokenize.ts @@ -220,6 +220,7 @@ export namespace Token { export const This = createToken('this', 'keyword.other.this.cs'); export const Throw = createToken('throw', 'keyword.control.flow.throw.cs'); export const Try = createToken('try', 'keyword.control.try.cs'); + export const TypeOf = createToken('typeof', 'keyword.other.typeof.cs'); export const Unchecked = createToken('unchecked', 'keyword.other.unchecked.cs'); export const Using = createToken('using', 'keyword.other.using.cs'); export const When 
= createToken('when', 'keyword.control.try.when.cs'); From 813b83e9dbde8561d0e5143d44dbf4f0c95eb42d Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Fri, 6 Jan 2017 23:10:17 -0800 Subject: [PATCH 088/192] Fix #1084 and add regression test --- syntaxes/csharp.tmLanguage.yml | 16 ++++++------- test/syntaxes/methods.test.syntax.ts | 36 ++++++++++++++++++++++++++++ 2 files changed, 44 insertions(+), 8 deletions(-) diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index 7406461a2a..89159352b4 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -478,7 +478,7 @@ repository: - include: '#type' - include: '#punctuation-accessor' '5': { name: entity.name.variable.property.cs } - end: (?=\}|;) + end: (?<=\})|(?=;) patterns: - include: '#comment' - include: '#property-accessors' @@ -517,7 +517,7 @@ repository: - include: '#punctuation-accessor' '5': name: keyword.other.this.cs - end: (?=\}|;) + end: (?<=\})|(?=;) patterns: - include: '#comment' - include: '#bracketed-parameter-list' @@ -562,7 +562,7 @@ repository: - name: entity.name.variable.event.cs match: '[_$[:alpha:]][_$[:alnum:]]*' - include: '#punctuation-comma' - end: (?=\}|;) + end: (?<=\})|(?=;) patterns: - include: '#comment' - include: '#event-accessors' @@ -631,7 +631,7 @@ repository: - include: '#type' - include: '#punctuation-accessor' '5': { name: entity.name.function.cs } - end: (?=\}|;) + end: (?<=\})|(?=;) patterns: - include: '#comment' - include: '#parenthesized-parameter-list' @@ -643,7 +643,7 @@ repository: begin: ([_$[:alpha:]][_$[:alnum:]]*)\s*(?=\() beginCaptures: '1': { name: entity.name.function.cs } - end: (?=\}|;) + end: (?<=\})|(?=;) patterns: - include: '#comment' - include: '#parenthesized-parameter-list' @@ -666,7 +666,7 @@ repository: beginCaptures: '1': { name: punctuation.tilde.cs } '2': { name: entity.name.function.cs } - end: (?=\}|;) + end: (?<=\})|(?=;) patterns: - include: '#comment' - include: 
'#parenthesized-parameter-list' @@ -698,7 +698,7 @@ repository: # '2': ? is a sub-expression. It's final value is not considered. '3': { name: keyword.other.operator.cs } '4': { name: entity.name.function.cs } - end: (?=\}|;) + end: (?<=\})|(?=;) patterns: - include: '#comment' - include: '#parenthesized-parameter-list' @@ -736,7 +736,7 @@ repository: '3': patterns: - include: '#type' - end: (?=\}|;) + end: (?<=\})|(?=;) patterns: - include: '#comment' - include: '#parenthesized-parameter-list' diff --git a/test/syntaxes/methods.test.syntax.ts b/test/syntaxes/methods.test.syntax.ts index 6699408c6e..c37890a445 100644 --- a/test/syntaxes/methods.test.syntax.ts +++ b/test/syntaxes/methods.test.syntax.ts @@ -387,5 +387,41 @@ namespace Test Token.Punctuation.CloseBrace ]); }); + + it("shadowed methods are highlighted properly (issue #1084)", () => { + + const input = Input.InClass(` +private new void foo1() //Correct highlight +{ +} + +new void foo2() //Function name not highlighted +{ +} +`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Private, + Token.Keywords.Modifiers.New, + Token.Type("void"), + Token.Identifiers.MethodName("foo1"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Comment.SingleLine.Start, + Token.Comment.SingleLine.Text("Correct highlight"), + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Keywords.Modifiers.New, + Token.Type("void"), + Token.Identifiers.MethodName("foo2"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Comment.SingleLine.Start, + Token.Comment.SingleLine.Text("Function name not highlighted"), + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace + ]); + }); }); }); \ No newline at end of file From b09b97ea8fe1ad3643db48553e318674b2980525 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Fri, 6 Jan 2017 23:22:14 -0800 Subject: [PATCH 089/192] Add regression test for #1084 --- 
test/syntaxes/constructors.test.syntax.ts | 94 ++++++++++++++++++++++- 1 file changed, 93 insertions(+), 1 deletion(-) diff --git a/test/syntaxes/constructors.test.syntax.ts b/test/syntaxes/constructors.test.syntax.ts index c21de143a3..39c705481b 100644 --- a/test/syntaxes/constructors.test.syntax.ts +++ b/test/syntaxes/constructors.test.syntax.ts @@ -250,7 +250,99 @@ internal WaitHandle(Task self, TT.Task /*task) Token.Comment.MultiLine.Text(" this.task = task;"), Token.Comment.MultiLine.Text(" this.selff = self;"), Token.Comment.MultiLine.Text("}"), - Token.Comment.MultiLine.Text(""), + Token.Comment.MultiLine.Text("") + ]); + }); + + it("Highlight properly within base constructor initializer (issue #782)", () => { + + const input = ` +public class A +{ + public A() : base( + 1, + "abc" + new B(), + new B()) { + var a = 1; + var b = "abc"; + var c = new B(); + var c = new B(); + } +} +`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Public, + Token.Keywords.Class, + Token.Identifiers.ClassName("A"), + Token.Punctuation.OpenBrace, + Token.Keywords.Modifiers.Public, + Token.Identifiers.MethodName("A"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Colon, + Token.Keywords.Base, + Token.Punctuation.OpenParen, + Token.Literals.Numeric.Decimal("1"), + Token.Punctuation.Comma, + Token.Punctuation.String.Begin, + Token.Literals.String("abc"), + Token.Punctuation.String.End, + Token.Keywords.New, + Token.Type("B"), + Token.Punctuation.TypeParameters.Begin, + Token.Type("char"), + Token.Punctuation.TypeParameters.End, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Comma, + Token.Keywords.New, + Token.Type("B"), + Token.Punctuation.TypeParameters.Begin, + Token.Type("string"), + Token.Punctuation.TypeParameters.End, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + 
Token.Type("var"), + Token.Variables.Local("a"), + Token.Operators.Assignment, + Token.Literals.Numeric.Decimal("1"), + Token.Punctuation.Semicolon, + Token.Type("var"), + Token.Variables.Local("b"), + Token.Operators.Assignment, + Token.Punctuation.String.Begin, + Token.Literals.String("abc"), + Token.Punctuation.String.End, + Token.Punctuation.Semicolon, + Token.Type("var"), + Token.Variables.Local("c"), + Token.Operators.Assignment, + Token.Keywords.New, + Token.Type("B"), + Token.Punctuation.TypeParameters.Begin, + Token.Type("char"), + Token.Punctuation.TypeParameters.End, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + Token.Type("var"), + Token.Variables.Local("c"), + Token.Operators.Assignment, + Token.Keywords.New, + Token.Type("B"), + Token.Punctuation.TypeParameters.Begin, + Token.Type("string"), + Token.Punctuation.TypeParameters.End, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseBrace ]); }); }); From 1b53e8d2226ba73fa6796005244b30ac069aeca0 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Fri, 6 Jan 2017 23:28:18 -0800 Subject: [PATCH 090/192] Add regression test for issue #1097 --- test/syntaxes/fields.test.syntax.ts | 30 +++++++++++++++++++++++++++-- 1 file changed, 28 insertions(+), 2 deletions(-) diff --git a/test/syntaxes/fields.test.syntax.ts b/test/syntaxes/fields.test.syntax.ts index b2e63a2110..f961833be0 100644 --- a/test/syntaxes/fields.test.syntax.ts +++ b/test/syntaxes/fields.test.syntax.ts @@ -221,7 +221,33 @@ const bool field = true;`); Token.Identifiers.FieldName("z"), Token.Punctuation.Semicolon]); }); - }); -}); + it("Fields with fully-qualified names are highlighted properly (issue #1097)", () => { + + const input = Input.InClass(` +private CanvasGroup[] groups; +private UnityEngine.UI.Image[] selectedImages; +`); + const tokens = tokenize(input); + tokens.should.deep.equal([ + 
Token.Keywords.Modifiers.Private, + Token.Type("CanvasGroup"), + Token.Punctuation.OpenBracket, + Token.Punctuation.CloseBracket, + Token.Identifiers.FieldName("groups"), + Token.Punctuation.Semicolon, + Token.Keywords.Modifiers.Private, + Token.Type("UnityEngine"), + Token.Punctuation.Accessor, + Token.Type("UI"), + Token.Punctuation.Accessor, + Token.Type("Image"), + Token.Punctuation.OpenBracket, + Token.Punctuation.CloseBracket, + Token.Identifiers.FieldName("selectedImages"), + Token.Punctuation.Semicolon + ]); + }); + }); +}); From c49b7332ad9b37d0edf081c981f44c92a42ebac3 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Fri, 6 Jan 2017 23:36:27 -0800 Subject: [PATCH 091/192] Add regression test for issue #1108 --- test/syntaxes/enums.test.syntax.ts | 72 ++++++++++++++++++++++++++++++ 1 file changed, 72 insertions(+) diff --git a/test/syntaxes/enums.test.syntax.ts b/test/syntaxes/enums.test.syntax.ts index 5696390439..0934b914f9 100644 --- a/test/syntaxes/enums.test.syntax.ts +++ b/test/syntaxes/enums.test.syntax.ts @@ -92,5 +92,77 @@ enum E Token.Variables.EnumMember("Value3"), Token.Punctuation.CloseBrace]); }); + + it("enum members are highligted properly (issue #1108)", () => { + + const input = ` +public enum TestEnum +{ + enum1, + enum2, + enum3, + enum4 +} + +public class TestClass +{ + +} + +public enum TestEnum2 +{ + enum1 = 10, + enum2 = 15, +} + +public class TestClass2 +{ + +} +`; + + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Public, + Token.Keywords.Enum, + Token.Identifiers.EnumName("TestEnum"), + Token.Punctuation.OpenBrace, + Token.Variables.EnumMember("enum1"), + Token.Punctuation.Comma, + Token.Variables.EnumMember("enum2"), + Token.Punctuation.Comma, + Token.Variables.EnumMember("enum3"), + Token.Punctuation.Comma, + Token.Variables.EnumMember("enum4"), + Token.Punctuation.CloseBrace, + + Token.Keywords.Modifiers.Public, + Token.Keywords.Class, + Token.Identifiers.ClassName("TestClass"), + 
Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + + Token.Keywords.Modifiers.Public, + Token.Keywords.Enum, + Token.Identifiers.EnumName("TestEnum2"), + Token.Punctuation.OpenBrace, + Token.Variables.EnumMember("enum1"), + Token.Operators.Assignment, + Token.Literals.Numeric.Decimal("10"), + Token.Punctuation.Comma, + Token.Variables.EnumMember("enum2"), + Token.Operators.Assignment, + Token.Literals.Numeric.Decimal("15"), + Token.Punctuation.Comma, + Token.Punctuation.CloseBrace, + + Token.Keywords.Modifiers.Public, + Token.Keywords.Class, + Token.Identifiers.ClassName("TestClass2"), + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace + ]); + }); }); }); \ No newline at end of file From 8eee866cc487f6ea152911794392fbe8e7495813 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Sat, 7 Jan 2017 08:10:19 -0800 Subject: [PATCH 092/192] Allow comments in more places --- syntaxes/csharp.tmLanguage.yml | 4 ++ test/syntaxes/comments.test.syntax.ts | 62 ++++++++++++++++++++++++++- test/syntaxes/utils/tokenize.ts | 26 +++++++++++ 3 files changed, 91 insertions(+), 1 deletion(-) diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index 89159352b4..d541421d3f 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -36,6 +36,7 @@ repository: type-declarations: patterns: + - include: '#comment' - include: '#storage-modifier' - include: '#class-declaration' - include: '#delegate-declaration' @@ -47,6 +48,7 @@ repository: class-members: patterns: + - include: '#comment' - include: '#storage-modifier' - include: '#type-declarations' - include: '#event-declaration' @@ -64,6 +66,7 @@ repository: struct-members: patterns: + - include: '#comment' - include: '#storage-modifier' - include: '#type-declarations' - include: '#event-declaration' @@ -81,6 +84,7 @@ repository: interface-members: patterns: + - include: '#comment' - include: '#event-declaration' - include: '#property-declaration' - include: 
'#indexer-declaration' diff --git a/test/syntaxes/comments.test.syntax.ts b/test/syntaxes/comments.test.syntax.ts index 3d5dd2d9f4..dba7320ade 100644 --- a/test/syntaxes/comments.test.syntax.ts +++ b/test/syntaxes/comments.test.syntax.ts @@ -4,7 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { should } from 'chai'; -import { tokenize, Token } from './utils/tokenize'; +import { tokenize, Input, Token } from './utils/tokenize'; describe("Grammar", () => { before(() => should()); @@ -41,5 +41,65 @@ describe("Grammar", () => { Token.Comment.MultiLine.Text(" foo "), Token.Comment.MultiLine.End]); }); + + it("in namespace", () => { + + const input = Input.InNamespace(`// foo`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Comment.SingleLine.Start, + Token.Comment.SingleLine.Text(" foo")]); + }); + + it("in class", () => { + + const input = Input.InClass(`// foo`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Comment.SingleLine.Start, + Token.Comment.SingleLine.Text(" foo")]); + }); + + it("in enum", () => { + + const input = Input.InEnum(`// foo`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Comment.SingleLine.Start, + Token.Comment.SingleLine.Text(" foo")]); + }); + + it("in interface", () => { + + const input = Input.InInterface(`// foo`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Comment.SingleLine.Start, + Token.Comment.SingleLine.Text(" foo")]); + }); + + it("in struct", () => { + + const input = Input.InStruct(`// foo`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Comment.SingleLine.Start, + Token.Comment.SingleLine.Text(" foo")]); + }); + + it("in method", () => { + + const input = Input.InMethod(`// foo`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Comment.SingleLine.Start, + Token.Comment.SingleLine.Text(" foo")]); + }); }); 
}); \ No newline at end of file diff --git a/test/syntaxes/utils/tokenize.ts b/test/syntaxes/utils/tokenize.ts index f76704e4a5..99b7728a33 100644 --- a/test/syntaxes/utils/tokenize.ts +++ b/test/syntaxes/utils/tokenize.ts @@ -65,6 +65,19 @@ export class Input { return new Input(lines, { startLine: 0, startIndex: 0, endLine: lines.length - 1, endIndex: lines[lines.length - 1].length }); } + public static InEnum(input: string) { + let text = ` +enum TestEnum { + ${input} +}`; + + // ensure consistent line-endings irrelevant of OS + text = text.replace('\r\n', '\n'); + let lines = text.split('\n'); + + return new Input(lines, { startLine: 2, startIndex: 4, endLine: lines.length - 1, endIndex: 0 }); + } + public static InClass(input: string) { let text = ` class TestClass { @@ -118,6 +131,19 @@ namespace TestNamespace { return new Input(lines, { startLine: 2, startIndex: 4, endLine: lines.length - 1, endIndex: 0 }); } + + public static InStruct(input: string) { + let text = ` +struct TestStruct { + ${input} +}`; + + // ensure consistent line-endings irrelevant of OS + text = text.replace('\r\n', '\n'); + let lines = text.split('\n'); + + return new Input(lines, { startLine: 2, startIndex: 4, endLine: lines.length - 1, endIndex: 0 }); + } } export interface Token { From 75c7168b1fd187b0bd6315e820969b6482ed2180 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Sat, 7 Jan 2017 08:18:12 -0800 Subject: [PATCH 093/192] Add array creation expressions --- syntaxes/csharp.tmLanguage.yml | 27 ++++++++++++ .../array-creation-expressions.syntax.test.ts | 41 +++++++++++++++++++ 2 files changed, 68 insertions(+) create mode 100644 test/syntaxes/array-creation-expressions.syntax.test.ts diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index d541421d3f..4bcda3e2de 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -128,6 +128,7 @@ repository: - include: '#expression-operators' - include: '#cast-expression' - include: 
'#object-creation-expression' + - include: '#array-creation-expression' - include: '#invocation-expression' - include: '#element-access-expression' - include: '#member-access-expression' @@ -1435,6 +1436,32 @@ repository: patterns: - include: '#argument-list' + array-creation-expression: + begin: |- + (?x) + (new)\s+ + (?(?: + (?:(?[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? + (?: + (?:\g(?:\s*\.\s*\g)*) + (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? + (?:(?:\*)*)? + (?:(?:\[,*\])*)? + (?:\s*\.\s*\g)* + )| + (?:\s*\(\s*(?:\g)(?:\s+\g)?(?:\s*,\s*(?:\g)(?:\s+\g)?)*\s*\)\s*)) + (?:(?:\[,*\])*)? + )\s* + (?=\[) + beginCaptures: + '1': { name: keyword.other.new.cs } + '2': + patterns: + - include: '#type' + end: (?<=\]) + patterns: + - include: '#bracketed-argument-list' + bracketed-parameter-list: begin: (?=(\[)) beginCaptures: diff --git a/test/syntaxes/array-creation-expressions.syntax.test.ts b/test/syntaxes/array-creation-expressions.syntax.test.ts new file mode 100644 index 0000000000..b67ba00da9 --- /dev/null +++ b/test/syntaxes/array-creation-expressions.syntax.test.ts @@ -0,0 +1,41 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. 
+ *--------------------------------------------------------------------------------------------*/ + +import { should } from 'chai'; +import { tokenize, Input, Token } from './utils/tokenize'; + +describe("Grammar", () => { + before(() => should()); + + describe("Array creation expressions", () => { + it("passed as argument", () => { + + const input = Input.InMethod(`c.abst(ref s, new int[] {1, i, i});`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Variables.Object("c"), + Token.Punctuation.Accessor, + Token.Identifiers.MethodName("abst"), + Token.Punctuation.OpenParen, + Token.Keywords.Modifiers.Ref, + Token.Variables.ReadWrite("s"), + Token.Punctuation.Comma, + Token.Keywords.New, + Token.Type("int"), + Token.Punctuation.OpenBracket, + Token.Punctuation.CloseBracket, + Token.Punctuation.OpenBrace, + Token.Literals.Numeric.Decimal("1"), + Token.Punctuation.Comma, + Token.Variables.ReadWrite("i"), + Token.Variables.ReadWrite("i"), + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + }); +}); \ No newline at end of file From 544d9837de56ad1dda5347e3cbca4f477434a0a2 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Sat, 7 Jan 2017 08:22:50 -0800 Subject: [PATCH 094/192] Add override and virtual modifiers --- syntaxes/csharp.tmLanguage.yml | 2 +- test/syntaxes/methods.test.syntax.ts | 32 ++++++++++++++++++++++++++++ test/syntaxes/utils/tokenize.ts | 2 ++ 3 files changed, 35 insertions(+), 1 deletion(-) diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index 4bcda3e2de..e6fd85c10b 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -234,7 +234,7 @@ repository: storage-modifier: name: 'storage.modifier.cs' - match: (? 
{ + + const input = Input.InClass(`public override M() { }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Public, + Token.Keywords.Modifiers.Override, + Token.Identifiers.MethodName("M"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace + ]); + }); + + it("public virtual", () => { + + const input = Input.InClass(`public virtual M() { }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Public, + Token.Keywords.Modifiers.Virtual, + Token.Identifiers.MethodName("M"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace + ]); + }); + it("commented parameters are highlighted properly (issue #802)", () => { const input = Input.InClass(`public void methodWithParametersCommented(int p1, /*int p2*/, int p3) {}`); diff --git a/test/syntaxes/utils/tokenize.ts b/test/syntaxes/utils/tokenize.ts index 99b7728a33..8cf55a4f7f 100644 --- a/test/syntaxes/utils/tokenize.ts +++ b/test/syntaxes/utils/tokenize.ts @@ -195,6 +195,7 @@ export namespace Token { export const Internal = createToken('internal', 'storage.modifier.cs'); export const New = createToken('new', 'storage.modifier.cs'); export const Out = createToken('out', 'storage.modifier.cs'); + export const Override = createToken('override', 'storage.modifier.cs'); export const Params = createToken('params', 'storage.modifier.cs'); export const Partial = createToken('partial', 'storage.modifier.cs'); export const Private = createToken('private', 'storage.modifier.cs'); @@ -204,6 +205,7 @@ export namespace Token { export const Ref = createToken('ref', 'storage.modifier.cs'); export const Sealed = createToken('sealed', 'storage.modifier.cs'); export const Static = createToken('static', 'storage.modifier.cs'); + export const Virtual = createToken('virtual', 'storage.modifier.cs'); } export const 
Add = createToken('add', 'keyword.other.add.cs'); From c1c575d98e2fe3eb3c13142768af07ede3a8087a Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Sat, 7 Jan 2017 08:43:06 -0800 Subject: [PATCH 095/192] Fix cast expressions --- syntaxes/csharp.tmLanguage.yml | 31 ++++++++++++++----- ...tax.test.ts => expressions.test.syntax.ts} | 28 +++++++++++++++-- 2 files changed, 49 insertions(+), 10 deletions(-) rename test/syntaxes/{array-creation-expressions.syntax.test.ts => expressions.test.syntax.ts} (60%) diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index e6fd85c10b..4f36dc1fb1 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -1336,14 +1336,29 @@ repository: match: '[_$[:alpha:]][_$[:alnum:]]*' cast-expression: - begin: \( - beginCaptures: - '0': { name: punctuation.parenthesis.open.cs } - end: (\))(?=\s*[_$[:alnum:]\(]) - endCaptures: - '1': { name: punctuation.parenthesis.close.cs } - patterns: - - include: '#type' + match: |- + (?x) + (\()\s* + (?(?: + (?:(?[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? + (?: + (?:\g(?:\s*\.\s*\g)*) + (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? + (?:(?:\*)*)? + (?:(?:\[,*\])*)? + (?:\s*\.\s*\g)* + )| + (?:\s*\(\s*(?:\g)(?:\s+\g)?(?:\s*,\s*(?:\g)(?:\s+\g)?)*\s*\)\s*)) + (?:(?:\[,*\])*)? + )\s* + (\)) + captures: + '1': { name: punctuation.parenthesis.open.cs } + '2': + patterns: + - include: '#type' + # '3': ? is a sub-expression. It's final value is not considered. 
+ '4': { name: punctuation.parenthesis.close.cs } this-or-base-expression: match: \b(?:(base)|(this))\b diff --git a/test/syntaxes/array-creation-expressions.syntax.test.ts b/test/syntaxes/expressions.test.syntax.ts similarity index 60% rename from test/syntaxes/array-creation-expressions.syntax.test.ts rename to test/syntaxes/expressions.test.syntax.ts index b67ba00da9..2f0f2177da 100644 --- a/test/syntaxes/array-creation-expressions.syntax.test.ts +++ b/test/syntaxes/expressions.test.syntax.ts @@ -9,8 +9,8 @@ import { tokenize, Input, Token } from './utils/tokenize'; describe("Grammar", () => { before(() => should()); - describe("Array creation expressions", () => { - it("passed as argument", () => { + describe("Expressions", () => { + it("array creation expression passed as argument", () => { const input = Input.InMethod(`c.abst(ref s, new int[] {1, i, i});`); const tokens = tokenize(input); @@ -31,11 +31,35 @@ describe("Grammar", () => { Token.Literals.Numeric.Decimal("1"), Token.Punctuation.Comma, Token.Variables.ReadWrite("i"), + Token.Punctuation.Comma, Token.Variables.ReadWrite("i"), Token.Punctuation.CloseBrace, Token.Punctuation.CloseParen, Token.Punctuation.Semicolon ]); }); + + it("arithmetic", () => { + + const input = Input.InMethod(`b = this.i != 1 + (2 - 3);`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Variables.ReadWrite("b"), + Token.Operators.Assignment, + Token.Keywords.This, + Token.Punctuation.Accessor, + Token.Variables.Property("i"), + Token.Operators.Relational.NotEqual, + Token.Literals.Numeric.Decimal("1"), + Token.Operators.Arithmetic.Addition, + Token.Punctuation.OpenParen, + Token.Literals.Numeric.Decimal("2"), + Token.Operators.Arithmetic.Subtraction, + Token.Literals.Numeric.Decimal("3"), + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); }); }); \ No newline at end of file From e20b1312973ef2824489de765a75e58c6a55694d Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Sat, 7 
Jan 2017 09:02:02 -0800 Subject: [PATCH 096/192] Add support for initializers and several more fixes --- syntaxes/csharp.tmLanguage.yml | 16 ++++- .../element-access-expressions.test.syntax.ts | 62 +++++++++++++++++++ 2 files changed, 76 insertions(+), 2 deletions(-) diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index 4f36dc1fb1..7f1664a149 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -113,8 +113,8 @@ repository: - include: '#using-statement' - include: '#labeled-statement' - include: '#local-declaration' - - include: '#expression' - include: '#block' + - include: '#expression' - include: '#punctuation-semicolon' expression: @@ -133,6 +133,7 @@ repository: - include: '#element-access-expression' - include: '#member-access-expression' - include: '#parenthesized-expression' + - include: '#initializer-expression' - include: '#identifier' extern-alias-directive: @@ -1331,6 +1332,17 @@ repository: patterns: - include: '#expression' + initializer-expression: + begin: \{ + beginCaptures: + '0': { name: punctuation.curlybrace.open.cs } + end: \} + endCaptures: + '0': { name: punctuation.curlybrace.close.cs } + patterns: + - include: '#expression' + - include: '#punctuation-comma' + identifier: name: variable.other.readwrite.cs match: '[_$[:alpha:]][_$[:alnum:]]*' @@ -1351,7 +1363,7 @@ repository: (?:\s*\(\s*(?:\g)(?:\s+\g)?(?:\s*,\s*(?:\g)(?:\s+\g)?)*\s*\)\s*)) (?:(?:\[,*\])*)? 
)\s* - (\)) + (\))(?=\s*[_$[:alnum:]\(]) captures: '1': { name: punctuation.parenthesis.open.cs } '2': diff --git a/test/syntaxes/element-access-expressions.test.syntax.ts b/test/syntaxes/element-access-expressions.test.syntax.ts index 6c9c338631..7d9b03d239 100644 --- a/test/syntaxes/element-access-expressions.test.syntax.ts +++ b/test/syntaxes/element-access-expressions.test.syntax.ts @@ -156,5 +156,67 @@ describe("Grammar", () => { Token.Punctuation.Semicolon ]); }); + + it("read/write array element", () => { + const input = Input.InMethod(` +object[] a1 = {(null), (this.a), c}; +a1[1] = ((this.a)); a1[2] = (c); a1[1] = (i); +`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("object"), + Token.Punctuation.OpenBracket, + Token.Punctuation.CloseBracket, + Token.Variables.Local("a1"), + Token.Operators.Assignment, + Token.Punctuation.OpenBrace, + Token.Punctuation.OpenParen, + Token.Literals.Null, + Token.Punctuation.CloseParen, + Token.Punctuation.Comma, + Token.Punctuation.OpenParen, + Token.Keywords.This, + Token.Punctuation.Accessor, + Token.Variables.Property("a"), + Token.Punctuation.CloseParen, + Token.Punctuation.Comma, + Token.Variables.ReadWrite("c"), + Token.Punctuation.CloseBrace, + Token.Punctuation.Semicolon, + + Token.Variables.Property("a1"), + Token.Punctuation.OpenBracket, + Token.Literals.Numeric.Decimal("1"), + Token.Punctuation.CloseBracket, + Token.Operators.Assignment, + Token.Punctuation.OpenParen, + Token.Punctuation.OpenParen, + Token.Keywords.This, + Token.Punctuation.Accessor, + Token.Variables.Property("a"), + Token.Punctuation.CloseParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + Token.Variables.Property("a1"), + Token.Punctuation.OpenBracket, + Token.Literals.Numeric.Decimal("2"), + Token.Punctuation.CloseBracket, + Token.Operators.Assignment, + Token.Punctuation.OpenParen, + Token.Variables.ReadWrite("c"), + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + 
Token.Variables.Property("a1"), + Token.Punctuation.OpenBracket, + Token.Literals.Numeric.Decimal("1"), + Token.Punctuation.CloseBracket, + Token.Operators.Assignment, + Token.Punctuation.OpenParen, + Token.Variables.ReadWrite("i"), + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + ]); + }); }); }); \ No newline at end of file From 553efe4e23d05d8c6e07a1dae107c8931ba7e750 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Sat, 7 Jan 2017 09:10:29 -0800 Subject: [PATCH 097/192] Add regression test for issue #1096 --- test/syntaxes/fields.test.syntax.ts | 70 +++++++++++++++++++++++++++++ 1 file changed, 70 insertions(+) diff --git a/test/syntaxes/fields.test.syntax.ts b/test/syntaxes/fields.test.syntax.ts index f961833be0..21b4b2cf0d 100644 --- a/test/syntaxes/fields.test.syntax.ts +++ b/test/syntaxes/fields.test.syntax.ts @@ -249,5 +249,75 @@ private UnityEngine.UI.Image[] selectedImages; Token.Punctuation.Semicolon ]); }); + + it("Fields with dictionary initializer highlights properly (issue #1096)", () => { + + const input = Input.InClass(` +private readonly Dictionary languageToIndex = new Dictionary() +{ + {"Simplified Chinese", 0}, + {"English", 1}, + {"Japanese", 2}, + {"Korean", 3} +}; +`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Private, + Token.Keywords.Modifiers.ReadOnly, + Token.Type("Dictionary"), + Token.Punctuation.TypeParameters.Begin, + Token.Type("string"), + Token.Punctuation.Comma, + Token.Type("int"), + Token.Punctuation.TypeParameters.End, + Token.Identifiers.FieldName("languageToIndex"), + Token.Operators.Assignment, + Token.Keywords.New, + Token.Type("Dictionary"), + Token.Punctuation.TypeParameters.Begin, + Token.Type("string"), + Token.Punctuation.Comma, + Token.Type("int"), + Token.Punctuation.TypeParameters.End, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Punctuation.OpenBrace, + Token.Punctuation.String.Begin, + 
Token.Literals.String("Simplified Chinese"), + Token.Punctuation.String.End, + Token.Punctuation.Comma, + Token.Literals.Numeric.Decimal("0"), + Token.Punctuation.CloseBrace, + Token.Punctuation.Comma, + Token.Punctuation.OpenBrace, + Token.Punctuation.String.Begin, + Token.Literals.String("English"), + Token.Punctuation.String.End, + Token.Punctuation.Comma, + Token.Literals.Numeric.Decimal("1"), + Token.Punctuation.CloseBrace, + Token.Punctuation.Comma, + Token.Punctuation.OpenBrace, + Token.Punctuation.String.Begin, + Token.Literals.String("Japanese"), + Token.Punctuation.String.End, + Token.Punctuation.Comma, + Token.Literals.Numeric.Decimal("2"), + Token.Punctuation.CloseBrace, + Token.Punctuation.Comma, + Token.Punctuation.OpenBrace, + Token.Punctuation.String.Begin, + Token.Literals.String("Korean"), + Token.Punctuation.String.End, + Token.Punctuation.Comma, + Token.Literals.Numeric.Decimal("3"), + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.Semicolon + ]); + }); }); }); From 4b17d8fd96be442e5ff03bc600111d8f378148c0 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Sat, 7 Jan 2017 10:21:09 -0800 Subject: [PATCH 098/192] Revamp invocation, element access and member access expressions and add regression test for issue #746 --- syntaxes/csharp.tmLanguage.yml | 165 +++++++++--------- .../invocation-expressions.test.syntax.ts | 42 ++++- test/syntaxes/string-literals.test.syntax.ts | 71 ++++++-- 3 files changed, 187 insertions(+), 91 deletions(-) diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index 7f1664a149..0fe3acef28 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -14,6 +14,7 @@ uuid: f7de61e2-bdde-4e2a-a139-8221b179584e # * compound assignement # * unsafe code # * XML doc comments +# * preprocesser patterns: - include: '#comment' @@ -129,15 +130,15 @@ repository: - include: '#cast-expression' - include: '#object-creation-expression' - include: 
'#array-creation-expression' + - include: '#member-access-expression' - include: '#invocation-expression' - include: '#element-access-expression' - - include: '#member-access-expression' - include: '#parenthesized-expression' - include: '#initializer-expression' - include: '#identifier' extern-alias-directive: - begin: \s*(extern)\b\s*(alias)\b\s*([_$[:alpha:]][_$[:alnum:]]*) + begin: \s*(extern)\b\s*(alias)\b\s*([_[:alpha:]][_[:alnum:]]*) beginCaptures: '1': { name: keyword.other.extern.cs } '2': { name: keyword.other.alias.cs } @@ -153,7 +154,7 @@ repository: end: (?=;) patterns: - include: '#type' - - begin: \b(using)\s+(?=([_$[:alpha:]][_$[:alnum:]]*)\s*=) + - begin: \b(using)\s+(?=([_[:alpha:]][_[:alnum:]]*)\s*=) beginCaptures: '1': { name: keyword.other.using.cs } '2': { name: entity.name.type.alias.cs } @@ -169,7 +170,7 @@ repository: patterns: - include: '#comment' - name: entity.name.type.namespace.cs - match: '[_$[:alpha:]][_$[:alnum:]]*' + match: '[_[:alpha:]][_[:alnum:]]*' - include: '#operator-assignment' attribute-section: @@ -204,7 +205,7 @@ repository: - include: '#punctuation-comma' attribute-named-argument: - begin: ([_$[:alpha:]][_$[:alnum:]]*)\s*(?==) + begin: ([_[:alpha:]][_[:alnum:]]*)\s*(?==) beginCaptures: '1': { name: entity.name.variable.property.cs } end: (?=(,|\))) @@ -220,7 +221,7 @@ repository: patterns: - include: '#comment' - name: entity.name.type.namespace.cs - match: '[_$[:alpha:]][_$[:alnum:]]*' + match: '[_[:alpha:]][_[:alnum:]]*' - include: '#punctuation-accessor' - begin: \{ beginCaptures: @@ -246,7 +247,7 @@ repository: end: (?=\{) patterns: # C# grammar: class identifier type-parameter-list[opt] - - match: (class)\s+([_$[:alpha:]][_$[:alnum:]]*(\s*<\s*(?:[_$[:alpha:]][_$[:alnum:]]*\s*,\s*)*(?:[_$[:alpha:]][_$[:alnum:]]*)\s*>)?) + - match: (class)\s+([_[:alpha:]][_[:alnum:]]*(\s*<\s*(?:[_[:alpha:]][_[:alnum:]]*\s*,\s*)*(?:[_[:alpha:]][_[:alnum:]]*)\s*>)?) 
captures: '1': { name: keyword.other.class.cs } '2': { name: entity.name.type.class.cs } @@ -266,7 +267,7 @@ repository: (?x) (?(?:\b(?:delegate)\b))\s+ (?(?: - (?:(?[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? + (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? (?: (?:\g(?:\s*\.\s*\g)*) (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? @@ -304,7 +305,7 @@ repository: end: (?=\{) patterns: # C# grammar: enum identifier - - match: (enum)\s+([_$[:alpha:]][_$[:alnum:]]*) + - match: (enum)\s+([_[:alpha:]][_[:alnum:]]*) captures: '1': { name: keyword.other.enum.cs } '2': { name: entity.name.type.enum.cs } @@ -324,7 +325,7 @@ repository: - include: '#comment' - include: '#attribute-section' - include: '#punctuation-comma' - - begin: '[_$[:alpha:]][_$[:alnum:]]*' + - begin: '[_[:alpha:]][_[:alnum:]]*' beginCaptures: '0': { name: variable.other.enummember.cs } end: (?=(,|\})) @@ -345,7 +346,7 @@ repository: (?x) (interface)\s+ ( - (?[_$[:alpha:]][_$[:alnum:]]*) + (?[_[:alpha:]][_[:alnum:]]*) (\s*<\s*(?:(?:(?:in|out)\s+)?\g\s*,\s*)*(?:(?:in|out)\s+)?\g\s*>)? ) captures: @@ -375,7 +376,7 @@ repository: (?x) (struct)\s+ ( - (?[_$[:alpha:]][_$[:alnum:]]*) + (?[_[:alpha:]][_[:alnum:]]*) (\s*<\s*(?:\g\s*,\s*)*\g\s*>)? ) captures: @@ -402,7 +403,7 @@ repository: - include: '#punctuation-comma' generic-constraints: - begin: (where)\s+([_$[:alpha:]][_$[:alnum:]]*)\s*(:) + begin: (where)\s+([_[:alpha:]][_[:alnum:]]*)\s*(:) beginCaptures: '1': { name: keyword.other.where.cs } '2': { name: storage.type.cs } @@ -426,7 +427,7 @@ repository: begin: |- (?x) (?(?: - (?:(?[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? + (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? (?: (?:\g(?:\s*\.\s*\g)*) (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? 
@@ -447,7 +448,7 @@ repository: end: (?=;) patterns: - name: entity.name.variable.field.cs - match: '[_$[:alpha:]][_$[:alnum:]]*' + match: '[_[:alpha:]][_[:alnum:]]*' - include: '#punctuation-comma' - include: '#comment' - include: '#variable-initializer' @@ -458,7 +459,7 @@ repository: (?!.*\b(?:class|interface|struct|enum|event)\b)\s* (? (?(?: - (?:(?[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? + (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? (?: (?:\g(?:\s*\.\s*\g)*) (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? @@ -496,7 +497,7 @@ repository: (?x) (? (?(?: - (?:(?[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? + (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? (?: (?:\g(?:\s*\.\s*\g)*) (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? @@ -537,7 +538,7 @@ repository: \b(?event)\b\s* (? (?(?: - (?:(?[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? + (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? (?: (?:\g(?:\s*\.\s*\g)*) (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? @@ -566,7 +567,7 @@ repository: '6': patterns: - name: entity.name.variable.event.cs - match: '[_$[:alpha:]][_$[:alnum:]]*' + match: '[_[:alpha:]][_[:alnum:]]*' - include: '#punctuation-comma' end: (?<=\})|(?=;) patterns: @@ -611,7 +612,7 @@ repository: (?x) (? (?(?: - (?:(?[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? + (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? (?: (?:\g(?:\s*\.\s*\g)*) (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? @@ -646,7 +647,7 @@ repository: - include: '#block' constructor-declaration: - begin: ([_$[:alpha:]][_$[:alnum:]]*)\s*(?=\() + begin: ([_[:alpha:]][_[:alnum:]]*)\s*(?=\() beginCaptures: '1': { name: entity.name.function.cs } end: (?<=\})|(?=;) @@ -668,7 +669,7 @@ repository: - include: "#argument-list" destructor-declaration: - begin: (~)([_$[:alpha:]][_$[:alnum:]]*)\s*(?=\() + begin: (~)([_[:alpha:]][_[:alnum:]]*)\s*(?=\() beginCaptures: '1': { name: punctuation.tilde.cs } '2': { name: entity.name.function.cs } @@ -683,7 +684,7 @@ repository: begin: |- (?x) (?(?: - (?:(?[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? 
+ (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? (?: (?:\g(?:\s*\.\s*\g)*) (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? @@ -717,7 +718,7 @@ repository: (?(?:\b(?:explicit|implicit)))\s* (?(?:\b(?:operator)))\s* (?(?: - (?:(?[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? + (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? (?: (?:\g(?:\s*\.\s*\g)*) (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? @@ -791,7 +792,7 @@ repository: captures: '1': { name: keyword.control.default.cs } - name: entity.name.label.cs - match: '[_$[:alpha]][_$[:alnum:]]*' + match: '[_[:alpha:]][_[:alnum:]]*' return-statement: begin: (?(?: - (?:(?[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? + (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? (?: (?:\g(?:\s*\.\s*\g)*) (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? @@ -1022,7 +1023,7 @@ repository: - match: |- (?x) (?(?: - (?:(?[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? + (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? (?: (?:\g(?:\s*\.\s*\g)*) (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? @@ -1098,7 +1099,7 @@ repository: - include: '#statement' labeled-statement: - match: '([_$[:alpha:]][_$[:alnum:]]*)\s*(:)' + match: '([_[:alpha:]][_[:alnum:]]*)\s*(:)' captures: '1': { name: entity.name.label.cs } '2': { name: punctuation.separator.colon.cs } @@ -1112,7 +1113,7 @@ repository: begin: |- (?x) (?(?: - (?:(?[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? + (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? (?: (?:\g(?:\s*\.\s*\g)*) (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? @@ -1133,7 +1134,7 @@ repository: end: (?=;|\)) patterns: - name: variable.local.cs - match: '[_$[:alpha:]][_$[:alnum:]]*' + match: '[_[:alpha:]][_[:alnum:]]*' - include: '#punctuation-comma' - include: '#comment' - include: '#variable-initializer' @@ -1143,7 +1144,7 @@ repository: (?x) (?\b(?:const)\b)\s* (?(?: - (?:(?[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? + (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? (?: (?:\g(?:\s*\.\s*\g)*) (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? 
@@ -1165,7 +1166,7 @@ repository: end: (?=;) patterns: - name: variable.local.cs - match: '[_$[:alpha:]][_$[:alnum:]]*' + match: '[_[:alpha:]][_[:alnum:]]*' - include: '#punctuation-comma' - include: '#comment' - include: '#variable-initializer' @@ -1345,14 +1346,14 @@ repository: identifier: name: variable.other.readwrite.cs - match: '[_$[:alpha:]][_$[:alnum:]]*' + match: '[_[:alpha:]][_[:alnum:]]*' cast-expression: match: |- (?x) (\()\s* (?(?: - (?:(?[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? + (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? (?: (?:\g(?:\s*\.\s*\g)*) (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? @@ -1363,7 +1364,7 @@ repository: (?:\s*\(\s*(?:\g)(?:\s+\g)?(?:\s*,\s*(?:\g)(?:\s+\g)?)*\s*\)\s*)) (?:(?:\[,*\])*)? )\s* - (\))(?=\s*[_$[:alnum:]\(]) + (\))(?=\s*[_[:alnum:]\(]) captures: '1': { name: punctuation.parenthesis.open.cs } '2': @@ -1381,68 +1382,74 @@ repository: invocation-expression: begin: |- (?x) - (?= - ( # identifier or type name on left-hand side - (?[_$[:alpha:]][_$[:alnum:]]*)\s* - (?<([^<>]|\g)+>)? - \s*\.\s* - )* - (\g)\s* # method name - (\g)?\s* # type parameters - \( # open paren of argument list - ) + (\.)? # preceding dot + ([_[:alpha:]][_[:alnum:]]*)\s* # method name + (?\s*<([^<>]|\g)+>\s*)?\s* # type parameters + (?=\() # open paren of argument list + beginCaptures: + '1': { name: punctuation.accessor.cs } + '2': { name: entity.name.function.cs } + '3': + patterns: + - include: '#type-parameters' end: (?<=\)) patterns: - - include: '#member-access-expression' - - include: '#punctuation-accessor' - - name: entity.name.function.cs - match: '[_$[:alpha:]][_$[:alnum:]]*' - - include: '#type-parameters' - include: '#argument-list' element-access-expression: begin: |- (?x) - (?= - ( # identifier or type name on left-hand side - (?[_$[:alpha:]][_$[:alnum:]]*)\s* - (?<([^<>]|\g)+>)? - \s*\.\s* - )* - (\g)\s* # property name - \[ # open square bracket of argument list - ) + (\.)? 
# preceding dot + ([_[:alpha:]][_[:alnum:]]*)\s* # property name + (?=\[) # open paren of argument list + beginCaptures: + '1': { name: punctuation.accessor.cs } + '2': { name: variable.other.object.property.cs } end: (?<=\]) patterns: - - include: '#member-access-expression' - - include: '#punctuation-accessor' - - name: variable.other.object.property.cs - match: '[_$[:alpha:]][_$[:alnum:]]*' - include: '#bracketed-argument-list' member-access-expression: patterns: - - match: (\.)\s*([_$[:alpha:]][_$[:alnum:]]*)(?=(\s*((\.\s*[_$[:alpha:]][_$[:alnum:]]*)|([^_$[:alnum:]\(\[\<])))) + # An identifier with no type parameters and a dot to the left should + # be treated as a property, so long as it isn't followed by a ( or [. + - match: + (?x) + (\.)\s* + ([_[:alpha:]][_[:alnum:]]*)\s* + (?![_[:alnum:]]|\(|\[|<) captures: '1': { name: punctuation.accessor.cs } '2': { name: variable.other.object.property.cs } + # An identifier with type parameters should be treated as an object, + # regardless of whether there is a dot to the left. - match: |- (?x) - ([_$[:alpha:]][_$[:alnum:]]*) - (?<([^<>]|\g)+>)? - (?=\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*) + (\.)?\s* + ([_[:alpha:]][_[:alnum:]]*) + (?\s*<([^<>]|\g)+>\s*) + (?=\s*\.\s*[_[:alpha:]][_[:alnum:]]*) captures: - '1': { name: variable.other.object.cs } - '2': + '1': { name: punctuation.accessor.cs } + '2': { name: variable.other.object.cs } + '3': patterns: - include: '#type-parameters' + # An identifier with no type parameters (and no dot to the left per the + # matches above) should be treated as an object. + - match: |- + (?x) + ([_[:alpha:]][_[:alnum:]]*) + (?=\s*\.\s*[_[:alpha:]][_[:alnum:]]*) + captures: + '1': { name: variable.other.object.cs } object-creation-expression: begin: |- (?x) (new)\s+ (?(?: - (?:(?[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? + (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? (?: (?:\g(?:\s*\.\s*\g)*) (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? 
@@ -1468,7 +1475,7 @@ repository: (?x) (new)\s+ (?(?: - (?:(?[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? + (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? (?: (?:\g(?:\s*\.\s*\g)*) (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? @@ -1506,7 +1513,7 @@ repository: - name: storage.modifier.cs match: \b(ref|params|out)\b # parameter name - - match: \s+([_$[:alpha:]][_$[:alnum:]]*)\s*(?=[,\]]) + - match: \s+([_[:alpha:]][_[:alnum:]]*)\s*(?=[,\]]) captures: '1': { name: variable.parameter.cs } - include: '#variable-initializer' @@ -1526,7 +1533,7 @@ repository: - name: storage.modifier.cs match: \b(ref|params|out)\b # parameter name - - match: \s+([_$[:alpha:]][_$[:alnum:]]*)\s*(?=[,)]) + - match: \s+([_[:alpha:]][_[:alnum:]]*)\s*(?=[,)]) captures: '1': { name: variable.parameter.cs } - include: '#variable-initializer' @@ -1558,7 +1565,7 @@ repository: - include: '#punctuation-comma' named-argument: - begin: ([_$[:alpha:]][_$[:alnum:]]*)\s*(:) + begin: ([_[:alpha:]][_[:alnum:]]*)\s*(:) beginCaptures: '1': { name: variable.parameter.cs } '2': { name: punctuation.separator.colon.cs } @@ -1598,7 +1605,7 @@ repository: match: |- (?x) (?(?: - (?:(?[_$[:alpha:]][_$[:alnum:]]*)\s*\:\:\s*)? + (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? (?: (?:\g(?:\s*\.\s*\g)*) (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? @@ -1624,20 +1631,20 @@ repository: type-name: patterns: - - match: ([_$[:alpha:]][_$[:alnum:]]*)\s*(\:\:) + - match: ([_[:alpha:]][_[:alnum:]]*)\s*(\:\:) captures: '1': { name: entity.name.type.alias.cs } '2': { name: punctuation.separator.coloncolon.cs } - - match: ([_$[:alpha:]][_$[:alnum:]]*)\s*(\.) + - match: ([_[:alpha:]][_[:alnum:]]*)\s*(\.) 
captures: '1': { name: storage.type.cs } '2': { name: punctuation.accessor.cs } - - match: (\.)\s*([_$[:alpha:]][_$[:alnum:]]*) + - match: (\.)\s*([_[:alpha:]][_[:alnum:]]*) captures: '1': { name: punctuation.accessor.cs } '2': { name: storage.type.cs } - name: storage.type.cs - match: '[_$[:alpha:]][_$[:alnum:]]*' + match: '[_[:alpha:]][_[:alnum:]]*' type-parameters: name: meta.type.parameters.cs diff --git a/test/syntaxes/invocation-expressions.test.syntax.ts b/test/syntaxes/invocation-expressions.test.syntax.ts index c5c662768e..c0e95ec337 100644 --- a/test/syntaxes/invocation-expressions.test.syntax.ts +++ b/test/syntaxes/invocation-expressions.test.syntax.ts @@ -187,7 +187,7 @@ describe("Grammar", () => { ]); }); - it("store result member of qualified generic with no arguments", () => { + it("store result of member of qualified generic with no arguments", () => { const input = Input.InMethod(`var o = N.C.M();`); const tokens = tokenize(input); @@ -208,7 +208,45 @@ describe("Grammar", () => { Token.Punctuation.Semicolon ]); }); - + + it("store result of qualified method with no arguments", () => { + const input = Input.InMethod(`var o = N.C.M();`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("var"), + Token.Variables.Local("o"), + Token.Operators.Assignment, + Token.Variables.Object("N"), + Token.Punctuation.Accessor, + Token.Variables.Property("C"), + Token.Punctuation.Accessor, + Token.Identifiers.MethodName("M"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("store result of this.qualified method with no arguments", () => { + const input = Input.InMethod(`var o = this.C.M();`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("var"), + Token.Variables.Local("o"), + Token.Operators.Assignment, + Token.Keywords.This, + Token.Punctuation.Accessor, + Token.Variables.Property("C"), + Token.Punctuation.Accessor, + 
Token.Identifiers.MethodName("M"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + it("store result of invocation with two named arguments", () => { const input = Input.InMethod(`var o = M(x: 19, y: 23);`); const tokens = tokenize(input); diff --git a/test/syntaxes/string-literals.test.syntax.ts b/test/syntaxes/string-literals.test.syntax.ts index e10a9342b6..4c1f9142f7 100644 --- a/test/syntaxes/string-literals.test.syntax.ts +++ b/test/syntaxes/string-literals.test.syntax.ts @@ -45,8 +45,8 @@ describe("Grammar", () => { it("line break before close quote", () => { - const input = Input.InClass( -`string test = "hello + const input = Input.InClass(` +string test = "hello world!";`); const tokens = tokenize(input); @@ -100,8 +100,8 @@ world!";`); it("line break before close quote (verbatim)", () => { - const input = Input.InClass( -`string test = @"hello + const input = Input.InClass(` +string test = @"hello world!";`); const tokens = tokenize(input); @@ -118,9 +118,10 @@ world!";`); it("highlight escaped double-quote properly (issue #1078 - repro 1)", () => { - const input = Input.InMethod( -`configContent = rgx.Replace(configContent, $"name{suffix}\\""); -File.WriteAllText(_testConfigFile, configContent);`); + const input = Input.InMethod(` +configContent = rgx.Replace(configContent, $"name{suffix}\\""); +File.WriteAllText(_testConfigFile, configContent); +`); const tokens = tokenize(input); tokens.should.deep.equal([ @@ -155,9 +156,10 @@ File.WriteAllText(_testConfigFile, configContent);`); it("highlight escaped double-quote properly (issue #1078 - repro 2)", () => { - const input = Input.InMethod( -`throw new InvalidCastException( - $"The value \\"{this.Value} is of the type \\"{this.Type}\\". You asked for \\"{typeof(T)}\\".");`); + const input = Input.InMethod(` +throw new InvalidCastException( + $"The value \\"{this.Value} is of the type \\"{this.Type}\\". 
You asked for \\"{typeof(T)}\\"."); +`); const tokens = tokenize(input); tokens.should.deep.equal([ @@ -196,5 +198,54 @@ File.WriteAllText(_testConfigFile, configContent);`); Token.Punctuation.Semicolon ]); }); + + it("highlight strings containing braces correctly (issue #746)", () => { + + const input = ` +namespace X +{ + class Y + { + public MethodZ() + { + this.Writer.WriteLine("class CInput{0}Register : public {1}", index, baseClass); + } + } +} +`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Namespace, + Token.Identifiers.NamespaceName("X"), + Token.Punctuation.OpenBrace, + Token.Keywords.Class, + Token.Identifiers.ClassName("Y"), + Token.Punctuation.OpenBrace, + Token.Keywords.Modifiers.Public, + Token.Identifiers.MethodName("MethodZ"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Keywords.This, + Token.Punctuation.Accessor, + Token.Variables.Property("Writer"), + Token.Punctuation.Accessor, + Token.Identifiers.MethodName("WriteLine"), + Token.Punctuation.OpenParen, + Token.Punctuation.String.Begin, + Token.Literals.String("class CInput{0}Register : public {1}"), + Token.Punctuation.String.End, + Token.Punctuation.Comma, + Token.Variables.ReadWrite("index"), + Token.Punctuation.Comma, + Token.Variables.ReadWrite("baseClass"), + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseBrace + ]); + }); }); }); \ No newline at end of file From 212e5b735c459b50a4be55ff061d32afac3dbcab Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Sat, 7 Jan 2017 12:53:34 -0800 Subject: [PATCH 099/192] Add support for preprocessor --- syntaxes/csharp.tmLanguage.yml | 144 +++++- test/syntaxes/preprocessor.test.syntax.ts | 533 ++++++++++++++++++++++ test/syntaxes/utils/tokenize.ts | 22 + 3 files changed, 689 insertions(+), 10 deletions(-) create mode 100644 
test/syntaxes/preprocessor.test.syntax.ts diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index 0fe3acef28..1dda07b94e 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -14,9 +14,9 @@ uuid: f7de61e2-bdde-4e2a-a139-8221b179584e # * compound assignement # * unsafe code # * XML doc comments -# * preprocesser patterns: +- include: '#preprocessor' - include: '#comment' - include: '#directives' - include: '#declarations' @@ -37,6 +37,7 @@ repository: type-declarations: patterns: + - include: '#preprocessor' - include: '#comment' - include: '#storage-modifier' - include: '#class-declaration' @@ -49,6 +50,7 @@ repository: class-members: patterns: + - include: '#preprocessor' - include: '#comment' - include: '#storage-modifier' - include: '#type-declarations' @@ -67,6 +69,7 @@ repository: struct-members: patterns: + - include: '#preprocessor' - include: '#comment' - include: '#storage-modifier' - include: '#type-declarations' @@ -85,6 +88,7 @@ repository: interface-members: patterns: + - include: '#preprocessor' - include: '#comment' - include: '#event-declaration' - include: '#property-declaration' @@ -95,6 +99,7 @@ repository: statement: patterns: + - include: '#preprocessor' - include: '#comment' - include: '#while-statement' - include: '#do-statement' @@ -120,6 +125,8 @@ repository: expression: patterns: + - include: '#preprocessor' + - include: '#comment' - include: '#checked-unchecked-expression' - include: '#typeof-expression' - include: '#interpolated-string' @@ -300,6 +307,7 @@ repository: begin: (?=enum\s+) end: (?<=\}) patterns: + - include: '#preprocessor' - include: '#comment' - begin: (?=enum) end: (?=\{) @@ -872,7 +880,7 @@ repository: begin: (? 
{ + before(() => should()); + + describe("Preprocessor", () => { + it("#define Foo", () => { + const input = `#define Foo`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Punctuation.Hash, + Token.Keywords.Preprocessor.Define, + Token.Identifiers.PreprocessorSymbol("Foo") + ]); + }); + + it("#define Foo//Foo", () => { + const input = `#define Foo//Foo`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Punctuation.Hash, + Token.Keywords.Preprocessor.Define, + Token.Identifiers.PreprocessorSymbol("Foo"), + Token.Comment.SingleLine.Start, + Token.Comment.SingleLine.Text("Foo") + ]); + }); + + it("#define Foo //Foo", () => { + const input = `#define Foo //Foo`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Punctuation.Hash, + Token.Keywords.Preprocessor.Define, + Token.Identifiers.PreprocessorSymbol("Foo"), + Token.Comment.SingleLine.Start, + Token.Comment.SingleLine.Text("Foo") + ]); + }); + + it("#undef Foo", () => { + const input = `#undef Foo`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Punctuation.Hash, + Token.Keywords.Preprocessor.Undef, + Token.Identifiers.PreprocessorSymbol("Foo") + ]); + }); + + it("#undef Foo//Foo", () => { + const input = `#undef Foo//Foo`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Punctuation.Hash, + Token.Keywords.Preprocessor.Undef, + Token.Identifiers.PreprocessorSymbol("Foo"), + Token.Comment.SingleLine.Start, + Token.Comment.SingleLine.Text("Foo") + ]); + }); + + it("#undef Foo //Foo", () => { + const input = `#undef Foo //Foo`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Punctuation.Hash, + Token.Keywords.Preprocessor.Undef, + Token.Identifiers.PreprocessorSymbol("Foo"), + Token.Comment.SingleLine.Start, + Token.Comment.SingleLine.Text("Foo") + ]); + }); + + it("#if true", () => { + const input = `#if true`; + const tokens = tokenize(input); + + 
tokens.should.deep.equal([ + Token.Punctuation.Hash, + Token.Keywords.Preprocessor.If, + Token.Literals.Boolean.True + ]); + }); + + it("#if false", () => { + const input = `#if false`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Punctuation.Hash, + Token.Keywords.Preprocessor.If, + Token.Literals.Boolean.False + ]); + }); + + it("#if Foo", () => { + const input = `#if Foo`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Punctuation.Hash, + Token.Keywords.Preprocessor.If, + Token.Identifiers.PreprocessorSymbol("Foo") + ]); + }); + + it("#if Foo || true", () => { + const input = `#if Foo || true`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Punctuation.Hash, + Token.Keywords.Preprocessor.If, + Token.Identifiers.PreprocessorSymbol("Foo"), + Token.Operators.Logical.Or, + Token.Literals.Boolean.True + ]); + }); + + it("#if Foo && true", () => { + const input = `#if Foo && true`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Punctuation.Hash, + Token.Keywords.Preprocessor.If, + Token.Identifiers.PreprocessorSymbol("Foo"), + Token.Operators.Logical.And, + Token.Literals.Boolean.True + ]); + }); + + it("#if Foo == true", () => { + const input = `#if Foo == true`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Punctuation.Hash, + Token.Keywords.Preprocessor.If, + Token.Identifiers.PreprocessorSymbol("Foo"), + Token.Operators.Relational.Equals, + Token.Literals.Boolean.True + ]); + }); + + it("#if Foo != true", () => { + const input = `#if Foo != true`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Punctuation.Hash, + Token.Keywords.Preprocessor.If, + Token.Identifiers.PreprocessorSymbol("Foo"), + Token.Operators.Relational.NotEqual, + Token.Literals.Boolean.True + ]); + }); + + it("#if !Foo", () => { + const input = `#if !Foo`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + 
Token.Punctuation.Hash, + Token.Keywords.Preprocessor.If, + Token.Operators.Logical.Not, + Token.Identifiers.PreprocessorSymbol("Foo") + ]); + }); + + it("#if (Foo != true) && Bar", () => { + const input = `#if (Foo != true) && Bar`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Punctuation.Hash, + Token.Keywords.Preprocessor.If, + Token.Punctuation.OpenParen, + Token.Identifiers.PreprocessorSymbol("Foo"), + Token.Operators.Relational.NotEqual, + Token.Literals.Boolean.True, + Token.Punctuation.CloseParen, + Token.Operators.Logical.And, + Token.Identifiers.PreprocessorSymbol("Bar") + ]); + }); + + it("#if (Foo != true) && Bar //Foo", () => { + const input = `#if (Foo != true) && Bar //Foo`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Punctuation.Hash, + Token.Keywords.Preprocessor.If, + Token.Punctuation.OpenParen, + Token.Identifiers.PreprocessorSymbol("Foo"), + Token.Operators.Relational.NotEqual, + Token.Literals.Boolean.True, + Token.Punctuation.CloseParen, + Token.Operators.Logical.And, + Token.Identifiers.PreprocessorSymbol("Bar"), + Token.Comment.SingleLine.Start, + Token.Comment.SingleLine.Text("Foo") + ]); + }); + + it("#if (Foo != true) && Bar //Foo", () => { + const input = `#if (Foo != true) && Bar //Foo`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Punctuation.Hash, + Token.Keywords.Preprocessor.If, + Token.Punctuation.OpenParen, + Token.Identifiers.PreprocessorSymbol("Foo"), + Token.Operators.Relational.NotEqual, + Token.Literals.Boolean.True, + Token.Punctuation.CloseParen, + Token.Operators.Logical.And, + Token.Identifiers.PreprocessorSymbol("Bar"), + Token.Comment.SingleLine.Start, + Token.Comment.SingleLine.Text("Foo") + ]); + }); + + it("#elif true", () => { + const input = `#elif true`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Punctuation.Hash, + Token.Keywords.Preprocessor.ElIf, + Token.Literals.Boolean.True + ]); + }); + + 
it("#elif false", () => { + const input = `#elif false`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Punctuation.Hash, + Token.Keywords.Preprocessor.ElIf, + Token.Literals.Boolean.False + ]); + }); + + it("#elif Foo", () => { + const input = `#elif Foo`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Punctuation.Hash, + Token.Keywords.Preprocessor.ElIf, + Token.Identifiers.PreprocessorSymbol("Foo") + ]); + }); + + it("#elif Foo || true", () => { + const input = `#elif Foo || true`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Punctuation.Hash, + Token.Keywords.Preprocessor.ElIf, + Token.Identifiers.PreprocessorSymbol("Foo"), + Token.Operators.Logical.Or, + Token.Literals.Boolean.True + ]); + }); + + it("#elif Foo && true", () => { + const input = `#elif Foo && true`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Punctuation.Hash, + Token.Keywords.Preprocessor.ElIf, + Token.Identifiers.PreprocessorSymbol("Foo"), + Token.Operators.Logical.And, + Token.Literals.Boolean.True + ]); + }); + + it("#elif Foo == true", () => { + const input = `#elif Foo == true`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Punctuation.Hash, + Token.Keywords.Preprocessor.ElIf, + Token.Identifiers.PreprocessorSymbol("Foo"), + Token.Operators.Relational.Equals, + Token.Literals.Boolean.True + ]); + }); + + it("#elif Foo != true", () => { + const input = `#elif Foo != true`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Punctuation.Hash, + Token.Keywords.Preprocessor.ElIf, + Token.Identifiers.PreprocessorSymbol("Foo"), + Token.Operators.Relational.NotEqual, + Token.Literals.Boolean.True + ]); + }); + + it("#elif !Foo", () => { + const input = `#elif !Foo`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Punctuation.Hash, + Token.Keywords.Preprocessor.ElIf, + Token.Operators.Logical.Not, + 
Token.Identifiers.PreprocessorSymbol("Foo") + ]); + }); + + it("#elif (Foo != true) && Bar", () => { + const input = `#elif (Foo != true) && Bar`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Punctuation.Hash, + Token.Keywords.Preprocessor.ElIf, + Token.Punctuation.OpenParen, + Token.Identifiers.PreprocessorSymbol("Foo"), + Token.Operators.Relational.NotEqual, + Token.Literals.Boolean.True, + Token.Punctuation.CloseParen, + Token.Operators.Logical.And, + Token.Identifiers.PreprocessorSymbol("Bar") + ]); + }); + + it("#elif (Foo != true) && Bar//Foo", () => { + const input = `#elif (Foo != true) && Bar//Foo`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Punctuation.Hash, + Token.Keywords.Preprocessor.ElIf, + Token.Punctuation.OpenParen, + Token.Identifiers.PreprocessorSymbol("Foo"), + Token.Operators.Relational.NotEqual, + Token.Literals.Boolean.True, + Token.Punctuation.CloseParen, + Token.Operators.Logical.And, + Token.Identifiers.PreprocessorSymbol("Bar"), + Token.Comment.SingleLine.Start, + Token.Comment.SingleLine.Text("Foo") + ]); + }); + + it("#elif (Foo != true) && Bar //Foo", () => { + const input = `#elif (Foo != true) && Bar //Foo`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Punctuation.Hash, + Token.Keywords.Preprocessor.ElIf, + Token.Punctuation.OpenParen, + Token.Identifiers.PreprocessorSymbol("Foo"), + Token.Operators.Relational.NotEqual, + Token.Literals.Boolean.True, + Token.Punctuation.CloseParen, + Token.Operators.Logical.And, + Token.Identifiers.PreprocessorSymbol("Bar"), + Token.Comment.SingleLine.Start, + Token.Comment.SingleLine.Text("Foo") + ]); + }); + + it("#else", () => { + const input = `#else`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Punctuation.Hash, + Token.Keywords.Preprocessor.Else, + ]); + }); + + it("#else//Foo", () => { + const input = `#else//Foo`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ 
+ Token.Punctuation.Hash, + Token.Keywords.Preprocessor.Else, + Token.Comment.SingleLine.Start, + Token.Comment.SingleLine.Text("Foo") + ]); + }); + + it("#else //Foo", () => { + const input = `#else //Foo`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Punctuation.Hash, + Token.Keywords.Preprocessor.Else, + Token.Comment.SingleLine.Start, + Token.Comment.SingleLine.Text("Foo") + ]); + }); + + it("#endif", () => { + const input = `#endif`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Punctuation.Hash, + Token.Keywords.Preprocessor.EndIf, + ]); + }); + + it("#endif//Foo", () => { + const input = `#endif//Foo`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Punctuation.Hash, + Token.Keywords.Preprocessor.EndIf, + Token.Comment.SingleLine.Start, + Token.Comment.SingleLine.Text("Foo") + ]); + }); + + it("#endif //Foo", () => { + const input = `#endif //Foo`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Punctuation.Hash, + Token.Keywords.Preprocessor.EndIf, + Token.Comment.SingleLine.Start, + Token.Comment.SingleLine.Text("Foo") + ]); + }); + + it("#warning This is a warning", () => { + const input = `#warning This is a warning`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Punctuation.Hash, + Token.Keywords.Preprocessor.Warning, + ]); + }); + + it("#error This is an error", () => { + const input = `#error This is an error`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Punctuation.Hash, + Token.Keywords.Preprocessor.Error, + ]); + }); + + it("#region My Region", () => { + const input = `#region My Region`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Punctuation.Hash, + Token.Keywords.Preprocessor.Region, + ]); + }); + + it("#region \"My Region\"", () => { + const input = `#region "My Region"`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + 
Token.Punctuation.Hash, + Token.Keywords.Preprocessor.Region, + ]); + }); + + it("#endregion", () => { + const input = `#endregion`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Punctuation.Hash, + Token.Keywords.Preprocessor.EndRegion, + ]); + }); + + it("#endregion//Foo", () => { + const input = `#endregion//Foo`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Punctuation.Hash, + Token.Keywords.Preprocessor.EndRegion, + Token.Comment.SingleLine.Start, + Token.Comment.SingleLine.Text("Foo") + ]); + }); + + it("#endregion //Foo", () => { + const input = `#endregion //Foo`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Punctuation.Hash, + Token.Keywords.Preprocessor.EndRegion, + Token.Comment.SingleLine.Start, + Token.Comment.SingleLine.Text("Foo") + ]); + }); + }); +}); \ No newline at end of file diff --git a/test/syntaxes/utils/tokenize.ts b/test/syntaxes/utils/tokenize.ts index 8cf55a4f7f..3cdba24e53 100644 --- a/test/syntaxes/utils/tokenize.ts +++ b/test/syntaxes/utils/tokenize.ts @@ -184,6 +184,7 @@ export namespace Token { export const LabelName = (text: string) => createToken(text, 'entity.name.label.cs'); export const MethodName = (text: string) => createToken(text, 'entity.name.function.cs'); export const NamespaceName = (text: string) => createToken(text, 'entity.name.type.namespace.cs'); + export const PreprocessorSymbol = (text: string) => createToken(text, 'entity.name.variable.preprocessor.symbol.cs'); export const PropertyName = (text: string) => createToken(text, 'entity.name.variable.property.cs'); export const StructName = (text: string) => createToken(text, 'entity.name.type.struct.cs'); } @@ -208,6 +209,26 @@ export namespace Token { export const Virtual = createToken('virtual', 'storage.modifier.cs'); } + export namespace Preprocessor { + export const Checksum = createToken('checksum', 'keyword.preprocessor.checksum.cs'); + export const Default = 
createToken('default', 'keyword.preprocessor.default.cs'); + export const Define = createToken('define', 'keyword.preprocessor.define.cs'); + export const Disable = createToken('disable', 'keyword.preprocessor.disable.cs'); + export const ElIf = createToken('elif', 'keyword.preprocessor.elif.cs'); + export const Else = createToken('else', 'keyword.preprocessor.else.cs'); + export const EndIf = createToken('endif', 'keyword.preprocessor.endif.cs'); + export const EndRegion = createToken('endregion', 'keyword.preprocessor.endregion.cs'); + export const Error = createToken('error', 'keyword.preprocessor.error.cs'); + export const Hidden = createToken('hidden', 'keyword.preprocessor.hidden.cs'); + export const If = createToken('if', 'keyword.preprocessor.if.cs'); + export const Line = createToken('line', 'keyword.preprocessor.line.cs'); + export const Pragma = createToken('pragma', 'keyword.preprocessor.pragma.cs'); + export const Region = createToken('region', 'keyword.preprocessor.region.cs'); + export const Restore = createToken('restore', 'keyword.preprocessor.restore.cs'); + export const Undef = createToken('undef', 'keyword.preprocessor.undef.cs'); + export const Warning = createToken('warning', 'keyword.preprocessor.warning.cs'); + } + export const Add = createToken('add', 'keyword.other.add.cs'); export const Alias = createToken('alias', 'keyword.other.alias.cs'); export const AttributeSpecifier = (text: string) => createToken(text, 'keyword.other.attribute-specifier.cs'); @@ -325,6 +346,7 @@ export namespace Token { export const Colon = createToken(':', 'punctuation.separator.colon.cs'); export const ColonColon = createToken('::', 'punctuation.separator.coloncolon.cs'); export const Comma = createToken(',', 'punctuation.separator.comma.cs'); + export const Hash = createToken('#', 'punctuation.separator.hash.cs') export const OpenBrace = createToken('{', 'punctuation.curlybrace.open.cs'); export const OpenBracket = createToken('[', 
'punctuation.squarebracket.open.cs'); export const OpenParen = createToken('(', 'punctuation.parenthesis.open.cs'); From ff5455a266a7cfd2a9834eb76a91239cc278da4d Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Sat, 7 Jan 2017 13:14:39 -0800 Subject: [PATCH 100/192] Add regression test for issue #731 --- syntaxes/csharp.tmLanguage.yml | 3 ++ test/syntaxes/preprocessor.test.syntax.ts | 57 ++++++++++++++++++++++- test/syntaxes/utils/tokenize.ts | 3 +- 3 files changed, 61 insertions(+), 2 deletions(-) diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index 1dda07b94e..dc01522cdc 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -1694,6 +1694,7 @@ repository: match: \. preprocessor: + name: meta.preprocessor.cs begin: ^\s*(\#)\s* beginCaptures: '1': { name: punctuation.separator.hash.cs } @@ -1738,11 +1739,13 @@ repository: captures: '1': { name: keyword.preprocessor.warning.cs } '2': { name: keyword.preprocessor.error.cs } + '3': { name: string.unquoted.preprocessor.message.cs } preprocessor-region: match: \b(region)\b\s*(.*)(?=$) captures: '1': { name: keyword.preprocessor.region.cs } + '2': { name: string.unquoted.preprocessor.message.cs } preprocessor-endregion: match: \b(endregion)\b diff --git a/test/syntaxes/preprocessor.test.syntax.ts b/test/syntaxes/preprocessor.test.syntax.ts index bb190fb470..3bd0c03a94 100644 --- a/test/syntaxes/preprocessor.test.syntax.ts +++ b/test/syntaxes/preprocessor.test.syntax.ts @@ -463,6 +463,7 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Punctuation.Hash, Token.Keywords.Preprocessor.Warning, + Token.PreprocessorMessage("This is a warning") ]); }); @@ -473,6 +474,7 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Punctuation.Hash, Token.Keywords.Preprocessor.Error, + Token.PreprocessorMessage("This is an error") ]); }); @@ -483,6 +485,7 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Punctuation.Hash, 
Token.Keywords.Preprocessor.Region, + Token.PreprocessorMessage("My Region") ]); }); @@ -493,6 +496,7 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Punctuation.Hash, Token.Keywords.Preprocessor.Region, + Token.PreprocessorMessage("\"My Region\"") ]); }); @@ -502,7 +506,7 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Punctuation.Hash, - Token.Keywords.Preprocessor.EndRegion, + Token.Keywords.Preprocessor.EndRegion ]); }); @@ -529,5 +533,56 @@ describe("Grammar", () => { Token.Comment.SingleLine.Text("Foo") ]); }); + + it("region name with double-quotes should be highlighted properly (issue #731)", () => { + const input = Input.InClass(` +#region " Register / Create New " +// GET: /Account/Register +[Authorize(Roles = UserRoles.SuperUser)] +public ActionResult Register() +{ + RedirectToAction("Application"); + return View(); +} +`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Punctuation.Hash, + Token.Keywords.Preprocessor.Region, + Token.PreprocessorMessage("\" Register / Create New \""), + Token.Comment.SingleLine.Start, + Token.Comment.SingleLine.Text(" GET: /Account/Register"), + Token.Punctuation.OpenBracket, + Token.Type("Authorize"), + Token.Punctuation.OpenParen, + Token.Identifiers.PropertyName("Roles"), + Token.Operators.Assignment, + Token.Variables.Object("UserRoles"), + Token.Punctuation.Accessor, + Token.Variables.Property("SuperUser"), + Token.Punctuation.CloseParen, + Token.Punctuation.CloseBracket, + Token.Keywords.Modifiers.Public, + Token.Type("ActionResult"), + Token.Identifiers.MethodName("Register"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Identifiers.MethodName("RedirectToAction"), + Token.Punctuation.OpenParen, + Token.Punctuation.String.Begin, + Token.Literals.String("Application"), + Token.Punctuation.String.End, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + Token.Keywords.Return, + 
Token.Identifiers.MethodName("View"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace + ]); + }); }); }); \ No newline at end of file diff --git a/test/syntaxes/utils/tokenize.ts b/test/syntaxes/utils/tokenize.ts index 3cdba24e53..1a846da3e1 100644 --- a/test/syntaxes/utils/tokenize.ts +++ b/test/syntaxes/utils/tokenize.ts @@ -7,7 +7,7 @@ import { ITokenizeLineResult, Registry, StackElement } from 'vscode-textmate'; const registry = new Registry(); const grammar = registry.loadGrammarFromPathSync('syntaxes/csharp.tmLanguage'); -const excludedTypes = ['source.cs', 'meta.interpolation.cs', 'meta.type.parameters.cs'] +const excludedTypes = ['source.cs', 'meta.interpolation.cs', 'meta.preprocessor.cs', 'meta.type.parameters.cs'] export function tokenize(input: string | Input, excludeTypes: boolean = true): Token[] { if (typeof input === "string") { @@ -390,5 +390,6 @@ export namespace Token { } export const IllegalNewLine = (text: string) => createToken(text, 'invalid.illegal.newline.cs'); + export const PreprocessorMessage = (text: string) => createToken(text, 'string.unquoted.preprocessor.message.cs'); export const Type = (text: string) => createToken(text, 'storage.type.cs'); } From b4f1ab2bcaa3838d2dacac034cecdefd387093b3 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Sat, 7 Jan 2017 13:18:51 -0800 Subject: [PATCH 101/192] Add regression test for issue #225 --- test/syntaxes/comments.test.syntax.ts | 25 ++++++++++++++++--------- 1 file changed, 16 insertions(+), 9 deletions(-) diff --git a/test/syntaxes/comments.test.syntax.ts b/test/syntaxes/comments.test.syntax.ts index dba7320ade..885b823a42 100644 --- a/test/syntaxes/comments.test.syntax.ts +++ b/test/syntaxes/comments.test.syntax.ts @@ -11,7 +11,6 @@ describe("Grammar", () => { describe("Comments", () => { it("single-line comment", () => { - const input = `// foo`; const tokens = tokenize(input); @@ -21,7 +20,6 @@ 
describe("Grammar", () => { }); it("single-line comment after whitespace", () => { - const input = ` // foo`; const tokens = tokenize(input); @@ -32,7 +30,6 @@ describe("Grammar", () => { }); it("multi-line comment", () => { - const input = `/* foo */`; const tokens = tokenize(input); @@ -43,7 +40,6 @@ describe("Grammar", () => { }); it("in namespace", () => { - const input = Input.InNamespace(`// foo`); const tokens = tokenize(input); @@ -53,7 +49,6 @@ describe("Grammar", () => { }); it("in class", () => { - const input = Input.InClass(`// foo`); const tokens = tokenize(input); @@ -63,7 +58,6 @@ describe("Grammar", () => { }); it("in enum", () => { - const input = Input.InEnum(`// foo`); const tokens = tokenize(input); @@ -73,7 +67,6 @@ describe("Grammar", () => { }); it("in interface", () => { - const input = Input.InInterface(`// foo`); const tokens = tokenize(input); @@ -83,7 +76,6 @@ describe("Grammar", () => { }); it("in struct", () => { - const input = Input.InStruct(`// foo`); const tokens = tokenize(input); @@ -93,7 +85,6 @@ describe("Grammar", () => { }); it("in method", () => { - const input = Input.InMethod(`// foo`); const tokens = tokenize(input); @@ -101,5 +92,21 @@ describe("Grammar", () => { Token.Comment.SingleLine.Start, Token.Comment.SingleLine.Text(" foo")]); }); + + it("comment should colorize if there isn't a space before it (issue #225)", () => { + const input = Input.InClass(` +private char GetChar()//Метод возвращающий +`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Private, + Token.Type("char"), + Token.Identifiers.MethodName("GetChar"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Comment.SingleLine.Start, + Token.Comment.SingleLine.Text("Метод возвращающий")]); + }); }); }); \ No newline at end of file From 43cfbe97f99cc2a6eb73c35eac6cc3e0ed1b4e7b Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Sat, 7 Jan 2017 13:59:12 -0800 Subject: [PATCH 102/192] Improve 
display of type declaration names with type parameters and allow attributes in type parameter lists --- syntaxes/csharp.tmLanguage.yml | 70 ++++++++++++++++--------- test/syntaxes/classes.test.syntax.ts | 29 ++++++++-- test/syntaxes/delegates.test.syntax.ts | 41 ++++++++++++++- test/syntaxes/interfaces.test.syntax.ts | 23 ++++++-- test/syntaxes/methods.test.syntax.ts | 19 +++++-- test/syntaxes/structs.test.syntax.ts | 14 ++++- test/syntaxes/utils/tokenize.ts | 2 + 7 files changed, 158 insertions(+), 40 deletions(-) diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index dc01522cdc..afc38a3bdc 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -10,9 +10,11 @@ uuid: f7de61e2-bdde-4e2a-a139-8221b179584e # * Refinement and tests to ensure proper highlighting while typing # * query expressions # * lambda expressions and anonymous functions -# * array, collection and object initializers +# * object creation with object initializer and no parentheses # * compound assignement -# * unsafe code +# * char literals +# * verbatim identifiers +# * unsafe code: fixed, sizeof, unsafe blocks, unsafe keyword # * XML doc comments patterns: @@ -253,13 +255,16 @@ repository: - begin: (?=class) end: (?=\{) patterns: - # C# grammar: class identifier type-parameter-list[opt] - - match: (class)\s+([_[:alpha:]][_[:alnum:]]*(\s*<\s*(?:[_[:alpha:]][_[:alnum:]]*\s*,\s*)*(?:[_[:alpha:]][_[:alnum:]]*)\s*>)?) + - match: |- + (?x) + \b(class)\b\s+ + ([_[:alpha:]][_[:alnum:]]*)\s* captures: '1': { name: keyword.other.class.cs } '2': { name: entity.name.type.class.cs } - - include: '#generic-constraints' + - include: '#type-parameter-list' - include: '#base-types' + - include: '#generic-constraints' - begin: \{ beginCaptures: '0': { name: punctuation.curlybrace.open.cs } @@ -283,12 +288,8 @@ repository: (?:\s*\(\s*(?:\g)(?:\s+\g)?(?:\s*,\s*(?:\g)(?:\s+\g)?)*\s*\)\s*)) (?:(?:\[,*\])*)? )\s+ - (? 
- (?: - \g - (?:\s*<\s*(?:(?:(?:in|out)\s+)?\g)(?:,\s*(?:(?:in|out)\s+)?\g)*\s*>\s*)? - ) - )\s* + (\g)\s* + (<([^<>]+)>)?\s* (?=\() beginCaptures: '1': { name: keyword.other.delegate.cs } @@ -297,6 +298,9 @@ repository: - include: '#type' # '3': ? is a sub-expression. It's final value is not considered. '4': { name: entity.name.type.delegate.cs } + '5': + patterns: + - include: '#type-parameter-list' end: (?=;) patterns: - include: '#comment' @@ -349,19 +353,16 @@ repository: - begin: (?=interface) end: (?=\{) patterns: - # C# grammar: interface identifier variant-type-parameter-list[opt] - match: |- (?x) - (interface)\s+ - ( - (?[_[:alpha:]][_[:alnum:]]*) - (\s*<\s*(?:(?:(?:in|out)\s+)?\g\s*,\s*)*(?:(?:in|out)\s+)?\g\s*>)? - ) + (interface)\b\s+ + ([_[:alpha:]][_[:alnum:]]*) captures: '1': { name: keyword.other.interface.cs } '2': { name: entity.name.type.interface.cs } - - include: '#generic-constraints' + - include: '#type-parameter-list' - include: '#base-types' + - include: '#generic-constraints' - begin: \{ beginCaptures: '0': { name: punctuation.curlybrace.open.cs } @@ -379,19 +380,16 @@ repository: - begin: (?=struct) end: (?=\{) patterns: - # C# grammar: struct identifier type-parameter-list[opt] - match: |- (?x) (struct)\s+ - ( - (?[_[:alpha:]][_[:alnum:]]*) - (\s*<\s*(?:\g\s*,\s*)*\g\s*>)? 
- ) + ([_[:alpha:]][_[:alnum:]]*) captures: '1': { name: keyword.other.struct.cs } '2': { name: entity.name.type.struct.cs } - - include: '#generic-constraints' + - include: '#type-parameter-list' - include: '#base-types' + - include: '#generic-constraints' - begin: \{ beginCaptures: '0': { name: punctuation.curlybrace.open.cs } @@ -401,6 +399,24 @@ repository: patterns: - include: '#struct-members' + type-parameter-list: + begin: \< + beginCaptures: + '0': { name: punctuation.definition.typeparameters.begin.cs } + end: \> + endCaptures: + '0': { name: punctuation.definition.typeparameters.end.cs } + patterns: + - match: \b(in|out)\b + captures: + '1': { name: storage.modifier.cs } + - match: \b([_[:alpha:]][_[:alnum:]]*)\b + captures: + '1': { name: entity.name.type.type-parameter.cs } + - include: '#comment' + - include: '#punctuation-comma' + - include: '#attribute-section' + base-types: begin: ':' beginCaptures: @@ -633,7 +649,8 @@ repository: )\s+ ) (?\g\s*\.\s*)? - (?\g(?:\s*<\s*\g(?:\s*,\s*\g)*\s*>\s*)?)\s* + (\g)\s* + (<([^<>]+)>)?\s* (?=\() beginCaptures: '1': @@ -646,6 +663,9 @@ repository: - include: '#type' - include: '#punctuation-accessor' '5': { name: entity.name.function.cs } + '6': + patterns: + - include: '#type-parameter-list' end: (?<=\})|(?=;) patterns: - include: '#comment' diff --git a/test/syntaxes/classes.test.syntax.ts b/test/syntaxes/classes.test.syntax.ts index 2933cfe54c..9b91d13a97 100644 --- a/test/syntaxes/classes.test.syntax.ts +++ b/test/syntaxes/classes.test.syntax.ts @@ -98,7 +98,12 @@ public abstract class PublicAbstractClass { } tokens.should.deep.equal([ Token.Keywords.Class, - Token.Identifiers.ClassName("Dictionary"), + Token.Identifiers.ClassName("Dictionary"), + Token.Punctuation.TypeParameters.Begin, + Token.Identifiers.TypeParameterName("TKey"), + Token.Punctuation.Comma, + Token.Identifiers.TypeParameterName("TValue"), + Token.Punctuation.TypeParameters.End, Token.Punctuation.OpenBrace, Token.Punctuation.CloseBrace]); 
}); @@ -123,7 +128,10 @@ class PublicClass : Dictionary>, IMap"), + Token.Identifiers.ClassName("PublicClass"), + Token.Punctuation.TypeParameters.Begin, + Token.Identifiers.TypeParameterName("T"), + Token.Punctuation.TypeParameters.End, Token.Punctuation.Colon, Token.Type("Root"), Token.Punctuation.Accessor, @@ -141,7 +149,10 @@ class PublicClass : Dictionary>, IMap"), + Token.Identifiers.ClassName("PublicClass"), + Token.Punctuation.TypeParameters.Begin, + Token.Identifiers.TypeParameterName("T"), + Token.Punctuation.TypeParameters.End, Token.Punctuation.Colon, Token.Type("Dictionary"), Token.Punctuation.TypeParameters.Begin, @@ -183,7 +194,10 @@ class PublicClass : Dictionary[]>, ISomething tokens.should.deep.equal([ Token.Keywords.Class, - Token.Identifiers.ClassName("PublicClass"), + Token.Identifiers.ClassName("PublicClass"), + Token.Punctuation.TypeParameters.Begin, + Token.Identifiers.TypeParameterName("T"), + Token.Punctuation.TypeParameters.End, Token.Keywords.Where, Token.Type("T"), Token.Punctuation.Colon, @@ -192,7 +206,12 @@ class PublicClass : Dictionary[]>, ISomething Token.Punctuation.CloseBrace, Token.Keywords.Class, - Token.Identifiers.ClassName("PublicClass"), + Token.Identifiers.ClassName("PublicClass"), + Token.Punctuation.TypeParameters.Begin, + Token.Identifiers.TypeParameterName("T"), + Token.Punctuation.Comma, + Token.Identifiers.TypeParameterName("X"), + Token.Punctuation.TypeParameters.End, Token.Punctuation.Colon, Token.Type("Dictionary"), Token.Punctuation.TypeParameters.Begin, diff --git a/test/syntaxes/delegates.test.syntax.ts b/test/syntaxes/delegates.test.syntax.ts index 5aacc09600..4f3d840ff2 100644 --- a/test/syntaxes/delegates.test.syntax.ts +++ b/test/syntaxes/delegates.test.syntax.ts @@ -32,7 +32,14 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Delegate, Token.Type("TResult"), - Token.Identifiers.DelegateName("D"), + Token.Identifiers.DelegateName("D"), + Token.Punctuation.TypeParameters.Begin, + 
Token.Keywords.Modifiers.In, + Token.Identifiers.TypeParameterName("T"), + Token.Punctuation.Comma, + Token.Keywords.Modifiers.Out, + Token.Identifiers.TypeParameterName("TResult"), + Token.Punctuation.TypeParameters.End, Token.Punctuation.OpenParen, Token.Type("T"), Token.Variables.Parameter("arg1"), @@ -52,7 +59,12 @@ delegate void D() tokens.should.deep.equal([ Token.Keywords.Delegate, Token.Type("void"), - Token.Identifiers.DelegateName("D"), + Token.Identifiers.DelegateName("D"), + Token.Punctuation.TypeParameters.Begin, + Token.Identifiers.TypeParameterName("T1"), + Token.Punctuation.Comma, + Token.Identifiers.TypeParameterName("T2"), + Token.Punctuation.TypeParameters.End, Token.Punctuation.OpenParen, Token.Punctuation.CloseParen, Token.Keywords.Where, @@ -62,6 +74,31 @@ delegate void D() Token.Punctuation.Semicolon]); }); + it("generic delegate with attributes on type parameters", () => { + + const input = `delegate void D<[Foo] T1, [Bar] T2>();`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Delegate, + Token.Type("void"), + Token.Identifiers.DelegateName("D"), + Token.Punctuation.TypeParameters.Begin, + Token.Punctuation.OpenBracket, + Token.Type("Foo"), + Token.Punctuation.CloseBracket, + Token.Identifiers.TypeParameterName("T1"), + Token.Punctuation.Comma, + Token.Punctuation.OpenBracket, + Token.Type("Bar"), + Token.Punctuation.CloseBracket, + Token.Identifiers.TypeParameterName("T2"), + Token.Punctuation.TypeParameters.End, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon]); + }); + it("delegate with multiple parameters", () => { const input = `delegate int D(ref string x, out int y, params object[] z);`; diff --git a/test/syntaxes/interfaces.test.syntax.ts b/test/syntaxes/interfaces.test.syntax.ts index 673f39bca6..ba7460b535 100644 --- a/test/syntaxes/interfaces.test.syntax.ts +++ b/test/syntaxes/interfaces.test.syntax.ts @@ -51,7 +51,12 @@ interface IBar : IFoo { } 
tokens.should.deep.equal([ Token.Keywords.Interface, - Token.Identifiers.InterfaceName("IFoo"), + Token.Identifiers.InterfaceName("IFoo"), + Token.Punctuation.TypeParameters.Begin, + Token.Identifiers.TypeParameterName("T1"), + Token.Punctuation.Comma, + Token.Identifiers.TypeParameterName("T2"), + Token.Punctuation.TypeParameters.End, Token.Punctuation.OpenBrace, Token.Punctuation.CloseBrace]); }); @@ -63,7 +68,14 @@ interface IBar : IFoo { } tokens.should.deep.equal([ Token.Keywords.Interface, - Token.Identifiers.InterfaceName("IFoo"), + Token.Identifiers.InterfaceName("IFoo"), + Token.Punctuation.TypeParameters.Begin, + Token.Keywords.Modifiers.In, + Token.Identifiers.TypeParameterName("T1"), + Token.Punctuation.Comma, + Token.Keywords.Modifiers.Out, + Token.Identifiers.TypeParameterName("T2"), + Token.Punctuation.TypeParameters.End, Token.Punctuation.OpenBrace, Token.Punctuation.CloseBrace]); }); @@ -75,7 +87,12 @@ interface IBar : IFoo { } tokens.should.deep.equal([ Token.Keywords.Interface, - Token.Identifiers.InterfaceName("IFoo"), + Token.Identifiers.InterfaceName("IFoo"), + Token.Punctuation.TypeParameters.Begin, + Token.Identifiers.TypeParameterName("T1"), + Token.Punctuation.Comma, + Token.Identifiers.TypeParameterName("T2"), + Token.Punctuation.TypeParameters.End, Token.Keywords.Where, Token.Type("T1"), Token.Punctuation.Colon, diff --git a/test/syntaxes/methods.test.syntax.ts b/test/syntaxes/methods.test.syntax.ts index 2a4be332fc..33180902be 100644 --- a/test/syntaxes/methods.test.syntax.ts +++ b/test/syntaxes/methods.test.syntax.ts @@ -59,7 +59,12 @@ int Add(int x, int y) tokens.should.deep.equal([ Token.Type("TResult"), - Token.Identifiers.MethodName("GetString"), + Token.Identifiers.MethodName("GetString"), + Token.Punctuation.TypeParameters.Begin, + Token.Identifiers.TypeParameterName("T"), + Token.Punctuation.Comma, + Token.Identifiers.TypeParameterName("TResult"), + Token.Punctuation.TypeParameters.End, Token.Punctuation.OpenParen, 
Token.Type("T"), Token.Variables.Parameter("arg"), @@ -153,7 +158,12 @@ int Add(int x, int y) tokens.should.deep.equal([ Token.Type("TResult"), - Token.Identifiers.MethodName("GetString"), + Token.Identifiers.MethodName("GetString"), + Token.Punctuation.TypeParameters.Begin, + Token.Identifiers.TypeParameterName("T"), + Token.Punctuation.Comma, + Token.Identifiers.TypeParameterName("TResult"), + Token.Punctuation.TypeParameters.End, Token.Punctuation.OpenParen, Token.Type("T"), Token.Variables.Parameter("arg"), @@ -248,7 +258,10 @@ public interface test Token.Punctuation.CloseParen, Token.Punctuation.Semicolon, Token.Type("Task"), - Token.Identifiers.MethodName("test"), + Token.Identifiers.MethodName("test"), + Token.Punctuation.TypeParameters.Begin, + Token.Identifiers.TypeParameterName("T"), + Token.Punctuation.TypeParameters.End, Token.Punctuation.OpenParen, Token.Type("List"), Token.Punctuation.TypeParameters.Begin, diff --git a/test/syntaxes/structs.test.syntax.ts b/test/syntaxes/structs.test.syntax.ts index c7588130ab..008f9de46e 100644 --- a/test/syntaxes/structs.test.syntax.ts +++ b/test/syntaxes/structs.test.syntax.ts @@ -52,7 +52,12 @@ struct S { } tokens.should.deep.equal([ Token.Keywords.Struct, - Token.Identifiers.StructName("S"), + Token.Identifiers.StructName("S"), + Token.Punctuation.TypeParameters.Begin, + Token.Identifiers.TypeParameterName("T1"), + Token.Punctuation.Comma, + Token.Identifiers.TypeParameterName("T2"), + Token.Punctuation.TypeParameters.End, Token.Punctuation.OpenBrace, Token.Punctuation.CloseBrace]); }); @@ -66,7 +71,12 @@ struct S where T1 : T2 { } tokens.should.deep.equal([ Token.Keywords.Struct, - Token.Identifiers.StructName("S"), + Token.Identifiers.StructName("S"), + Token.Punctuation.TypeParameters.Begin, + Token.Identifiers.TypeParameterName("T1"), + Token.Punctuation.Comma, + Token.Identifiers.TypeParameterName("T2"), + Token.Punctuation.TypeParameters.End, Token.Keywords.Where, Token.Type("T1"), Token.Punctuation.Colon, 
diff --git a/test/syntaxes/utils/tokenize.ts b/test/syntaxes/utils/tokenize.ts index 1a846da3e1..861efbf7f6 100644 --- a/test/syntaxes/utils/tokenize.ts +++ b/test/syntaxes/utils/tokenize.ts @@ -187,12 +187,14 @@ export namespace Token { export const PreprocessorSymbol = (text: string) => createToken(text, 'entity.name.variable.preprocessor.symbol.cs'); export const PropertyName = (text: string) => createToken(text, 'entity.name.variable.property.cs'); export const StructName = (text: string) => createToken(text, 'entity.name.type.struct.cs'); + export const TypeParameterName = (text: string) => createToken(text, 'entity.name.type.type-parameter.cs'); } export namespace Keywords { export namespace Modifiers { export const Abstract = createToken('abstract', 'storage.modifier.cs'); export const Const = createToken('const', 'storage.modifier.cs'); + export const In = createToken('in', 'storage.modifier.cs'); export const Internal = createToken('internal', 'storage.modifier.cs'); export const New = createToken('new', 'storage.modifier.cs'); export const Out = createToken('out', 'storage.modifier.cs'); From 6255ad9d5f8546a47493dff382082e1198ee304a Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Sat, 7 Jan 2017 14:26:06 -0800 Subject: [PATCH 103/192] Display built-in types as keywords --- syntaxes/csharp.tmLanguage.yml | 5 +- test/syntaxes/boolean-literals.test.syntax.ts | 4 +- test/syntaxes/cast-expressions.test.syntax.ts | 10 +- .../syntaxes/checked-unchecked.test.syntax.ts | 4 +- test/syntaxes/classes.test.syntax.ts | 10 +- test/syntaxes/comments.test.syntax.ts | 2 +- test/syntaxes/constructors.test.syntax.ts | 24 +-- test/syntaxes/delegates.test.syntax.ts | 14 +- .../element-access-expressions.test.syntax.ts | 6 +- test/syntaxes/enums.test.syntax.ts | 2 +- test/syntaxes/events.test.syntax.ts | 2 +- test/syntaxes/expressions.test.syntax.ts | 2 +- test/syntaxes/fields.test.syntax.ts | 38 ++--- test/syntaxes/indexers.test.syntax.ts | 22 +-- 
.../interpolated-strings.test.syntax.ts | 20 +-- .../invocation-expressions.test.syntax.ts | 12 +- .../iteration-statements.test.syntax.ts | 8 +- test/syntaxes/locals.test.syntax.ts | 12 +- test/syntaxes/lock-statements.test.syntax.ts | 8 +- test/syntaxes/methods.test.syntax.ts | 50 +++---- test/syntaxes/numeric-literals.test.syntax.ts | 8 +- test/syntaxes/operators.test.syntax.ts | 140 +++++++++--------- test/syntaxes/properties.test.syntax.ts | 20 +-- test/syntaxes/string-literals.test.syntax.ts | 12 +- test/syntaxes/type-names.test.syntax.ts | 20 +-- test/syntaxes/using-directives.test.syntax.ts | 4 +- test/syntaxes/using-statements.test.syntax.ts | 12 +- test/syntaxes/utils/tokenize.ts | 19 +++ 28 files changed, 255 insertions(+), 235 deletions(-) diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index afc38a3bdc..a6ba0bfad7 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -12,6 +12,7 @@ uuid: f7de61e2-bdde-4e2a-a139-8221b179584e # * lambda expressions and anonymous functions # * object creation with object initializer and no parentheses # * compound assignement +# * nullable types # * char literals # * verbatim identifiers # * unsafe code: fixed, sizeof, unsafe blocks, unsafe keyword @@ -1653,9 +1654,9 @@ repository: '3': { name: entity.name.variable.tuple.cs } type-builtin: - match: \b(bool|byte|char|decimal|double|float|int|long|object|sbyte|short|string|uint|ulong|ushort)\b + match: \b(bool|byte|char|decimal|double|float|int|long|object|sbyte|short|string|uint|ulong|ushort|void)\b captures: - '1': { name: storage.type.cs } + '1': { name: keyword.type.cs } type-name: patterns: diff --git a/test/syntaxes/boolean-literals.test.syntax.ts b/test/syntaxes/boolean-literals.test.syntax.ts index d771dec7d9..9c9363c03d 100644 --- a/test/syntaxes/boolean-literals.test.syntax.ts +++ b/test/syntaxes/boolean-literals.test.syntax.ts @@ -16,7 +16,7 @@ describe("Grammar", () => { const tokens = tokenize(input); 
tokens.should.deep.equal([ - Token.Type("bool"), + Token.PrimitiveType.Bool, Token.Identifiers.FieldName("x"), Token.Operators.Assignment, Token.Literals.Boolean.True, @@ -29,7 +29,7 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("bool"), + Token.PrimitiveType.Bool, Token.Identifiers.FieldName("x"), Token.Operators.Assignment, Token.Literals.Boolean.False, diff --git a/test/syntaxes/cast-expressions.test.syntax.ts b/test/syntaxes/cast-expressions.test.syntax.ts index 2e7c5ca67c..1cef5fcbe9 100644 --- a/test/syntaxes/cast-expressions.test.syntax.ts +++ b/test/syntaxes/cast-expressions.test.syntax.ts @@ -19,7 +19,7 @@ describe("Grammar", () => { Token.Variables.Local("o"), Token.Operators.Assignment, Token.Punctuation.OpenParen, - Token.Type("object"), + Token.PrimitiveType.Object, Token.Punctuation.CloseParen, Token.Literals.Numeric.Decimal("42"), Token.Punctuation.Semicolon @@ -37,7 +37,7 @@ describe("Grammar", () => { Token.Punctuation.OpenParen, Token.Type("C"), Token.Punctuation.TypeParameters.Begin, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Punctuation.TypeParameters.End, Token.Punctuation.CloseParen, Token.Literals.Numeric.Decimal("42"), @@ -53,7 +53,7 @@ describe("Grammar", () => { Token.Identifiers.MethodName("M"), Token.Punctuation.OpenParen, Token.Punctuation.OpenParen, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Punctuation.CloseParen, Token.Literals.Numeric.Decimal("42"), Token.Punctuation.CloseParen, @@ -69,10 +69,10 @@ describe("Grammar", () => { Token.Identifiers.MethodName("M"), Token.Punctuation.OpenParen, Token.Punctuation.OpenParen, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Punctuation.CloseParen, Token.Punctuation.OpenParen, - Token.Type("object"), + Token.PrimitiveType.Object, Token.Punctuation.CloseParen, Token.Literals.Numeric.Decimal("42"), Token.Punctuation.CloseParen, diff --git a/test/syntaxes/checked-unchecked.test.syntax.ts 
b/test/syntaxes/checked-unchecked.test.syntax.ts index e1cbe42d14..40361386ae 100644 --- a/test/syntaxes/checked-unchecked.test.syntax.ts +++ b/test/syntaxes/checked-unchecked.test.syntax.ts @@ -43,7 +43,7 @@ unchecked const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Local("x"), Token.Operators.Assignment, Token.Keywords.Checked, @@ -59,7 +59,7 @@ unchecked const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Local("x"), Token.Operators.Assignment, Token.Keywords.Unchecked, diff --git a/test/syntaxes/classes.test.syntax.ts b/test/syntaxes/classes.test.syntax.ts index 9b91d13a97..276b73aeb9 100644 --- a/test/syntaxes/classes.test.syntax.ts +++ b/test/syntaxes/classes.test.syntax.ts @@ -160,9 +160,9 @@ class PublicClass : Dictionary>, IMap : Dictionary>, IMap : Dictionary[]>, ISomething Token.Punctuation.Comma, Token.Type("List"), Token.Punctuation.TypeParameters.Begin, - Token.Type("string"), + Token.PrimitiveType.String, Token.Punctuation.TypeParameters.End, Token.Punctuation.OpenBracket, Token.Punctuation.CloseBracket, diff --git a/test/syntaxes/comments.test.syntax.ts b/test/syntaxes/comments.test.syntax.ts index 885b823a42..cb0450624d 100644 --- a/test/syntaxes/comments.test.syntax.ts +++ b/test/syntaxes/comments.test.syntax.ts @@ -101,7 +101,7 @@ private char GetChar()//Метод возвращающий tokens.should.deep.equal([ Token.Keywords.Modifiers.Private, - Token.Type("char"), + Token.PrimitiveType.Char, Token.Identifiers.MethodName("GetChar"), Token.Punctuation.OpenParen, Token.Punctuation.CloseParen, diff --git a/test/syntaxes/constructors.test.syntax.ts b/test/syntaxes/constructors.test.syntax.ts index 39c705481b..1878ab4626 100644 --- a/test/syntaxes/constructors.test.syntax.ts +++ b/test/syntaxes/constructors.test.syntax.ts @@ -46,7 +46,7 @@ describe("Grammar", () => { Token.Keywords.Modifiers.Public, 
Token.Identifiers.MethodName("TestClass"), Token.Punctuation.OpenParen, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("x"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, @@ -63,7 +63,7 @@ describe("Grammar", () => { Token.Identifiers.MethodName("TestClass"), Token.Punctuation.OpenParen, Token.Keywords.Modifiers.Ref, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("x"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, @@ -81,10 +81,10 @@ TestClass(int x, int y) tokens.should.deep.equal([ Token.Identifiers.MethodName("TestClass"), Token.Punctuation.OpenParen, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("x"), Token.Punctuation.Comma, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, @@ -99,10 +99,10 @@ TestClass(int x, int y) tokens.should.deep.equal([ Token.Identifiers.MethodName("TestClass"), Token.Punctuation.OpenParen, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("x"), Token.Punctuation.Comma, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("y"), Token.Punctuation.CloseParen, Token.Operators.Arrow, @@ -170,7 +170,7 @@ TestClass(int x, int y) tokens.should.deep.equal([ Token.Identifiers.MethodName("TestClass"), Token.Punctuation.OpenParen, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("x"), Token.Punctuation.CloseParen, Token.Punctuation.Colon, @@ -191,7 +191,7 @@ TestClass(int x, int y) tokens.should.deep.equal([ Token.Identifiers.MethodName("TestClass"), Token.Punctuation.OpenParen, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("x"), Token.Punctuation.CloseParen, Token.Punctuation.Colon, @@ -293,7 +293,7 @@ public class A Token.Keywords.New, Token.Type("B"), Token.Punctuation.TypeParameters.Begin, - Token.Type("char"), + Token.PrimitiveType.Char, 
Token.Punctuation.TypeParameters.End, Token.Punctuation.OpenParen, Token.Punctuation.CloseParen, @@ -301,7 +301,7 @@ public class A Token.Keywords.New, Token.Type("B"), Token.Punctuation.TypeParameters.Begin, - Token.Type("string"), + Token.PrimitiveType.String, Token.Punctuation.TypeParameters.End, Token.Punctuation.OpenParen, Token.Punctuation.CloseParen, @@ -325,7 +325,7 @@ public class A Token.Keywords.New, Token.Type("B"), Token.Punctuation.TypeParameters.Begin, - Token.Type("char"), + Token.PrimitiveType.Char, Token.Punctuation.TypeParameters.End, Token.Punctuation.OpenParen, Token.Punctuation.CloseParen, @@ -336,7 +336,7 @@ public class A Token.Keywords.New, Token.Type("B"), Token.Punctuation.TypeParameters.Begin, - Token.Type("string"), + Token.PrimitiveType.String, Token.Punctuation.TypeParameters.End, Token.Punctuation.OpenParen, Token.Punctuation.CloseParen, diff --git a/test/syntaxes/delegates.test.syntax.ts b/test/syntaxes/delegates.test.syntax.ts index 4f3d840ff2..9bf09bd072 100644 --- a/test/syntaxes/delegates.test.syntax.ts +++ b/test/syntaxes/delegates.test.syntax.ts @@ -17,7 +17,7 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Delegate, - Token.Type("void"), + Token.PrimitiveType.Void, Token.Identifiers.DelegateName("D"), Token.Punctuation.OpenParen, Token.Punctuation.CloseParen, @@ -58,7 +58,7 @@ delegate void D() tokens.should.deep.equal([ Token.Keywords.Delegate, - Token.Type("void"), + Token.PrimitiveType.Void, Token.Identifiers.DelegateName("D"), Token.Punctuation.TypeParameters.Begin, Token.Identifiers.TypeParameterName("T1"), @@ -81,7 +81,7 @@ delegate void D() tokens.should.deep.equal([ Token.Keywords.Delegate, - Token.Type("void"), + Token.PrimitiveType.Void, Token.Identifiers.DelegateName("D"), Token.Punctuation.TypeParameters.Begin, Token.Punctuation.OpenBracket, @@ -106,19 +106,19 @@ delegate void D() tokens.should.deep.equal([ Token.Keywords.Delegate, - Token.Type("int"), + Token.PrimitiveType.Int, 
Token.Identifiers.DelegateName("D"), Token.Punctuation.OpenParen, Token.Keywords.Modifiers.Ref, - Token.Type("string"), + Token.PrimitiveType.String, Token.Variables.Parameter("x"), Token.Punctuation.Comma, Token.Keywords.Modifiers.Out, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("y"), Token.Punctuation.Comma, Token.Keywords.Modifiers.Params, - Token.Type("object"), + Token.PrimitiveType.Object, Token.Punctuation.OpenBracket, Token.Punctuation.CloseBracket, Token.Variables.Parameter("z"), diff --git a/test/syntaxes/element-access-expressions.test.syntax.ts b/test/syntaxes/element-access-expressions.test.syntax.ts index 7d9b03d239..49ed323122 100644 --- a/test/syntaxes/element-access-expressions.test.syntax.ts +++ b/test/syntaxes/element-access-expressions.test.syntax.ts @@ -125,7 +125,7 @@ describe("Grammar", () => { Token.Operators.Assignment, Token.Variables.Object("C"), Token.Punctuation.TypeParameters.Begin, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Punctuation.TypeParameters.End, Token.Punctuation.Accessor, Token.Variables.Property("P"), @@ -147,7 +147,7 @@ describe("Grammar", () => { Token.Punctuation.Accessor, Token.Variables.Object("C"), Token.Punctuation.TypeParameters.Begin, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Punctuation.TypeParameters.End, Token.Punctuation.Accessor, Token.Variables.Property("P"), @@ -165,7 +165,7 @@ a1[1] = ((this.a)); a1[2] = (c); a1[1] = (i); const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("object"), + Token.PrimitiveType.Object, Token.Punctuation.OpenBracket, Token.Punctuation.CloseBracket, Token.Variables.Local("a1"), diff --git a/test/syntaxes/enums.test.syntax.ts b/test/syntaxes/enums.test.syntax.ts index 0934b914f9..868c3cebd0 100644 --- a/test/syntaxes/enums.test.syntax.ts +++ b/test/syntaxes/enums.test.syntax.ts @@ -31,7 +31,7 @@ describe("Grammar", () => { Token.Keywords.Enum, Token.Identifiers.EnumName("E"), Token.Punctuation.Colon, - 
Token.Type("byte"), + Token.PrimitiveType.Byte, Token.Punctuation.OpenBrace, Token.Punctuation.CloseBrace]); }); diff --git a/test/syntaxes/events.test.syntax.ts b/test/syntaxes/events.test.syntax.ts index 569a8e66e6..95b9f9a838 100644 --- a/test/syntaxes/events.test.syntax.ts +++ b/test/syntaxes/events.test.syntax.ts @@ -114,7 +114,7 @@ public event Type Event Token.Type("EventHandler"), Token.Type("IFoo"), Token.Punctuation.TypeParameters.Begin, - Token.Type("string"), + Token.PrimitiveType.String, Token.Punctuation.TypeParameters.End, Token.Punctuation.Accessor, Token.Identifiers.EventName("Event"), diff --git a/test/syntaxes/expressions.test.syntax.ts b/test/syntaxes/expressions.test.syntax.ts index 2f0f2177da..475261c622 100644 --- a/test/syntaxes/expressions.test.syntax.ts +++ b/test/syntaxes/expressions.test.syntax.ts @@ -24,7 +24,7 @@ describe("Grammar", () => { Token.Variables.ReadWrite("s"), Token.Punctuation.Comma, Token.Keywords.New, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Punctuation.OpenBracket, Token.Punctuation.CloseBracket, Token.Punctuation.OpenBrace, diff --git a/test/syntaxes/fields.test.syntax.ts b/test/syntaxes/fields.test.syntax.ts index 21b4b2cf0d..e0b82f290f 100644 --- a/test/syntaxes/fields.test.syntax.ts +++ b/test/syntaxes/fields.test.syntax.ts @@ -80,11 +80,11 @@ string _field3;`); Token.Punctuation.Semicolon, Token.Keywords.Modifiers.ReadOnly, - Token.Type("string"), + Token.PrimitiveType.String, Token.Identifiers.FieldName("_field2"), Token.Punctuation.Semicolon, - Token.Type("string"), + Token.PrimitiveType.String, Token.Identifiers.FieldName("_field3"), Token.Punctuation.Semicolon]); }); @@ -98,11 +98,11 @@ string[] field123;`); const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("string"), + Token.PrimitiveType.String, Token.Identifiers.FieldName("field123"), Token.Punctuation.Semicolon, - Token.Type("string"), + Token.PrimitiveType.String, Token.Punctuation.OpenBracket, 
Token.Punctuation.CloseBracket, Token.Identifiers.FieldName("field123"), @@ -119,7 +119,7 @@ const bool field = true;`); tokens.should.deep.equal([ Token.Keywords.Modifiers.Private, - Token.Type("string"), + Token.PrimitiveType.String, Token.Identifiers.FieldName("field"), Token.Operators.Assignment, Token.Punctuation.String.Begin, @@ -128,7 +128,7 @@ const bool field = true;`); Token.Punctuation.Semicolon, Token.Keywords.Modifiers.Const, - Token.Type("bool"), + Token.PrimitiveType.Bool, Token.Identifiers.FieldName("field"), Token.Operators.Assignment, Token.Literals.Boolean.True, @@ -141,7 +141,7 @@ const bool field = true;`); const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("int"), + Token.PrimitiveType.Int, Token.Identifiers.FieldName("x"), Token.Operators.Assignment, Token.Literals.Numeric.Decimal("19"), @@ -163,9 +163,9 @@ const bool field = true;`); tokens.should.deep.equal([ Token.Punctuation.OpenParen, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Punctuation.Comma, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Punctuation.CloseParen, Token.Identifiers.FieldName("x"), Token.Punctuation.Semicolon]); @@ -179,9 +179,9 @@ const bool field = true;`); tokens.should.deep.equal([ Token.Keywords.Modifiers.Private, Token.Punctuation.OpenParen, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Punctuation.Comma, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Punctuation.CloseParen, Token.Identifiers.FieldName("x"), Token.Punctuation.Semicolon]); @@ -194,10 +194,10 @@ const bool field = true;`); tokens.should.deep.equal([ Token.Punctuation.OpenParen, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Tuple("x"), Token.Punctuation.Comma, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Tuple("y"), Token.Punctuation.CloseParen, Token.Identifiers.FieldName("z"), @@ -212,10 +212,10 @@ const bool field = true;`); tokens.should.deep.equal([ Token.Keywords.Modifiers.Private, 
Token.Punctuation.OpenParen, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Tuple("x"), Token.Punctuation.Comma, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Tuple("y"), Token.Punctuation.CloseParen, Token.Identifiers.FieldName("z"), @@ -268,18 +268,18 @@ private readonly Dictionary languageToIndex = new Dictionary { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("string"), + Token.PrimitiveType.String, Token.Identifiers.FieldName("test"), Token.Operators.Assignment, Token.Punctuation.InterpolatedString.Begin, @@ -39,7 +39,7 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("string"), + Token.PrimitiveType.String, Token.Identifiers.FieldName("test"), Token.Operators.Assignment, Token.Punctuation.InterpolatedString.Begin, @@ -54,7 +54,7 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("string"), + Token.PrimitiveType.String, Token.Identifiers.FieldName("test"), Token.Operators.Assignment, Token.Punctuation.InterpolatedString.Begin, @@ -69,7 +69,7 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("string"), + Token.PrimitiveType.String, Token.Identifiers.FieldName("test"), Token.Operators.Assignment, Token.Punctuation.InterpolatedString.Begin, @@ -94,7 +94,7 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("string"), + Token.PrimitiveType.String, Token.Identifiers.FieldName("test"), Token.Operators.Assignment, Token.Punctuation.InterpolatedString.Begin, @@ -111,7 +111,7 @@ world!";`); const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("string"), + Token.PrimitiveType.String, Token.Identifiers.FieldName("test"), Token.Operators.Assignment, Token.Punctuation.InterpolatedString.Begin, @@ -131,7 +131,7 @@ world!";`); const tokens = tokenize(input); tokens.should.deep.equal([ - 
Token.Type("string"), + Token.PrimitiveType.String, Token.Identifiers.FieldName("test"), Token.Operators.Assignment, Token.Punctuation.InterpolatedString.VerbatimBegin, @@ -154,7 +154,7 @@ world!";`); const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("string"), + Token.PrimitiveType.String, Token.Identifiers.FieldName("test"), Token.Operators.Assignment, Token.Punctuation.InterpolatedString.VerbatimBegin, @@ -183,7 +183,7 @@ world {two}!";`); const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("string"), + Token.PrimitiveType.String, Token.Identifiers.FieldName("test"), Token.Operators.Assignment, Token.Punctuation.InterpolatedString.VerbatimBegin, @@ -208,7 +208,7 @@ world!";`); const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("string"), + Token.PrimitiveType.String, Token.Identifiers.FieldName("test"), Token.Operators.Assignment, Token.Punctuation.InterpolatedString.VerbatimBegin, diff --git a/test/syntaxes/invocation-expressions.test.syntax.ts b/test/syntaxes/invocation-expressions.test.syntax.ts index c0e95ec337..3e134d4641 100644 --- a/test/syntaxes/invocation-expressions.test.syntax.ts +++ b/test/syntaxes/invocation-expressions.test.syntax.ts @@ -104,7 +104,7 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Identifiers.MethodName("M"), Token.Punctuation.TypeParameters.Begin, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Punctuation.TypeParameters.End, Token.Punctuation.OpenParen, Token.Punctuation.CloseParen, @@ -121,7 +121,7 @@ describe("Grammar", () => { Token.Punctuation.TypeParameters.Begin, Token.Type("T"), Token.Punctuation.TypeParameters.Begin, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Punctuation.TypeParameters.End, Token.Punctuation.TypeParameters.End, Token.Punctuation.OpenParen, @@ -141,7 +141,7 @@ describe("Grammar", () => { Token.Punctuation.TypeParameters.Begin, Token.Type("U"), Token.Punctuation.TypeParameters.Begin, - Token.Type("int"), + 
Token.PrimitiveType.Int, Token.Punctuation.TypeParameters.End, Token.Punctuation.TypeParameters.End, Token.Punctuation.TypeParameters.End, @@ -158,7 +158,7 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Variables.Object("C"), Token.Punctuation.TypeParameters.Begin, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Punctuation.TypeParameters.End, Token.Punctuation.Accessor, Token.Identifiers.MethodName("M"), @@ -177,7 +177,7 @@ describe("Grammar", () => { Token.Punctuation.Accessor, Token.Variables.Object("C"), Token.Punctuation.TypeParameters.Begin, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Punctuation.TypeParameters.End, Token.Punctuation.Accessor, Token.Identifiers.MethodName("M"), @@ -199,7 +199,7 @@ describe("Grammar", () => { Token.Punctuation.Accessor, Token.Variables.Object("C"), Token.Punctuation.TypeParameters.Begin, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Punctuation.TypeParameters.End, Token.Punctuation.Accessor, Token.Identifiers.MethodName("M"), diff --git a/test/syntaxes/iteration-statements.test.syntax.ts b/test/syntaxes/iteration-statements.test.syntax.ts index 2e16e9848b..0b8e9fe840 100644 --- a/test/syntaxes/iteration-statements.test.syntax.ts +++ b/test/syntaxes/iteration-statements.test.syntax.ts @@ -49,7 +49,7 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.For, Token.Punctuation.OpenParen, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Local("i"), Token.Operators.Assignment, Token.Literals.Numeric.Decimal("0"), @@ -78,7 +78,7 @@ for (int i = 0; i < 42; i++) tokens.should.deep.equal([ Token.Keywords.For, Token.Punctuation.OpenParen, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Local("i"), Token.Operators.Assignment, Token.Literals.Numeric.Decimal("0"), @@ -109,7 +109,7 @@ for (int i = 0; i < 42; i++) tokens.should.deep.equal([ Token.Keywords.For, Token.Punctuation.OpenParen, - Token.Type("int"), + Token.PrimitiveType.Int, 
Token.Variables.Local("i"), Token.Operators.Assignment, Token.Literals.Numeric.Decimal("0"), @@ -136,7 +136,7 @@ for (int i = 0; i < 42; i++) tokens.should.deep.equal([ Token.Keywords.ForEach, Token.Punctuation.OpenParen, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Local("i"), Token.Keywords.In, Token.Variables.ReadWrite("numbers"), diff --git a/test/syntaxes/locals.test.syntax.ts b/test/syntaxes/locals.test.syntax.ts index 521573ecc3..30e9b37213 100644 --- a/test/syntaxes/locals.test.syntax.ts +++ b/test/syntaxes/locals.test.syntax.ts @@ -15,7 +15,7 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Local("x"), Token.Punctuation.Semicolon ]); @@ -26,7 +26,7 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Local("x"), Token.Operators.Assignment, Token.Literals.Numeric.Decimal("42"), @@ -39,7 +39,7 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Local("x"), Token.Punctuation.Comma, Token.Variables.Local("y"), @@ -52,7 +52,7 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Local("x"), Token.Operators.Assignment, Token.Literals.Numeric.Decimal("19"), @@ -70,7 +70,7 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Modifiers.Const, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Local("x"), Token.Operators.Assignment, Token.Literals.Numeric.Decimal("42"), @@ -84,7 +84,7 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Modifiers.Const, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Local("x"), Token.Operators.Assignment, Token.Literals.Numeric.Decimal("19"), 
diff --git a/test/syntaxes/lock-statements.test.syntax.ts b/test/syntaxes/lock-statements.test.syntax.ts index 5df41dd6b5..3b9f607ed7 100644 --- a/test/syntaxes/lock-statements.test.syntax.ts +++ b/test/syntaxes/lock-statements.test.syntax.ts @@ -18,7 +18,7 @@ describe("Grammar", () => { Token.Keywords.Lock, Token.Punctuation.OpenParen, Token.Keywords.New, - Token.Type("object"), + Token.PrimitiveType.Object, Token.Punctuation.OpenParen, Token.Punctuation.CloseParen, Token.Punctuation.CloseParen, @@ -37,7 +37,7 @@ describe("Grammar", () => { Token.Keywords.Lock, Token.Punctuation.OpenParen, Token.Keywords.New, - Token.Type("object"), + Token.PrimitiveType.Object, Token.Punctuation.OpenParen, Token.Punctuation.CloseParen, Token.Punctuation.CloseParen, @@ -60,7 +60,7 @@ lock (new object()) Token.Keywords.Lock, Token.Punctuation.OpenParen, Token.Keywords.New, - Token.Type("object"), + Token.PrimitiveType.Object, Token.Punctuation.OpenParen, Token.Punctuation.CloseParen, Token.Punctuation.CloseParen, @@ -83,7 +83,7 @@ lock (new object()) Token.Keywords.Lock, Token.Punctuation.OpenParen, Token.Keywords.New, - Token.Type("object"), + Token.PrimitiveType.Object, Token.Punctuation.OpenParen, Token.Punctuation.CloseParen, Token.Punctuation.CloseParen, diff --git a/test/syntaxes/methods.test.syntax.ts b/test/syntaxes/methods.test.syntax.ts index 33180902be..21128b9cf5 100644 --- a/test/syntaxes/methods.test.syntax.ts +++ b/test/syntaxes/methods.test.syntax.ts @@ -16,7 +16,7 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("void"), + Token.PrimitiveType.Void, Token.Identifiers.MethodName("Foo"), Token.Punctuation.OpenParen, Token.Punctuation.CloseParen, @@ -34,13 +34,13 @@ int Add(int x, int y) const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("int"), + Token.PrimitiveType.Int, Token.Identifiers.MethodName("Add"), Token.Punctuation.OpenParen, - Token.Type("int"), + Token.PrimitiveType.Int, 
Token.Variables.Parameter("x"), Token.Punctuation.Comma, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, @@ -83,13 +83,13 @@ int Add(int x, int y) const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("int"), + Token.PrimitiveType.Int, Token.Identifiers.MethodName("Add"), Token.Punctuation.OpenParen, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("x"), Token.Punctuation.Comma, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("y"), Token.Punctuation.CloseParen, Token.Operators.Arrow, @@ -105,10 +105,10 @@ int Add(int x, int y) const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("string"), + Token.PrimitiveType.String, Token.Type("IFoo"), Token.Punctuation.TypeParameters.Begin, - Token.Type("string"), + Token.PrimitiveType.String, Token.Punctuation.TypeParameters.End, Token.Punctuation.Accessor, Token.Identifiers.MethodName("GetString"), @@ -123,7 +123,7 @@ int Add(int x, int y) const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("string"), + Token.PrimitiveType.String, Token.Identifiers.MethodName("GetString"), Token.Punctuation.OpenParen, Token.Punctuation.CloseParen, @@ -136,14 +136,14 @@ int Add(int x, int y) const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("string"), + Token.PrimitiveType.String, Token.Identifiers.MethodName("GetString"), Token.Punctuation.OpenParen, - Token.Type("string"), + Token.PrimitiveType.String, Token.Variables.Parameter("format"), Token.Punctuation.Comma, Token.Keywords.Modifiers.Params, - Token.Type("object"), + Token.PrimitiveType.Object, Token.Punctuation.OpenBracket, Token.Punctuation.CloseBracket, Token.Variables.Parameter("args"), @@ -214,17 +214,17 @@ int Add(int x, int y) tokens.should.deep.equal([ Token.Keywords.Modifiers.Public, - Token.Type("void"), + Token.PrimitiveType.Void, 
Token.Identifiers.MethodName("methodWithParametersCommented"), Token.Punctuation.OpenParen, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("p1"), Token.Punctuation.Comma, Token.Comment.MultiLine.Start, Token.Comment.MultiLine.Text("int p2"), Token.Comment.MultiLine.End, Token.Punctuation.Comma, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("p3"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, @@ -252,7 +252,7 @@ public interface test Token.Punctuation.OpenParen, Token.Type("List"), Token.Punctuation.TypeParameters.Begin, - Token.Type("string"), + Token.PrimitiveType.String, Token.Punctuation.TypeParameters.End, Token.Variables.Parameter("blah"), Token.Punctuation.CloseParen, @@ -320,13 +320,13 @@ namespace Test Token.Punctuation.CloseParen, Token.Punctuation.CloseBracket, Token.Keywords.Modifiers.Public, - Token.Type("void"), + Token.PrimitiveType.Void, Token.Identifiers.MethodName("AddToGoingUsers"), Token.Punctuation.OpenParen, Token.Type("Guid"), Token.Variables.Parameter("id"), Token.Punctuation.Comma, - Token.Type("string"), + Token.PrimitiveType.String, Token.Variables.Parameter("user"), Token.Punctuation.CloseParen, Token.Operators.Arrow, @@ -361,13 +361,13 @@ namespace Test Token.Punctuation.CloseParen, Token.Punctuation.CloseBracket, Token.Keywords.Modifiers.Public, - Token.Type("void"), + Token.PrimitiveType.Void, Token.Identifiers.MethodName("AddToNotGoingUsers"), Token.Punctuation.OpenParen, Token.Type("Guid"), Token.Variables.Parameter("id"), Token.Punctuation.Comma, - Token.Type("string"), + Token.PrimitiveType.String, Token.Variables.Parameter("user"), Token.Punctuation.CloseParen, Token.Operators.Arrow, @@ -402,13 +402,13 @@ namespace Test Token.Punctuation.CloseParen, Token.Punctuation.CloseBracket, Token.Keywords.Modifiers.Public, - Token.Type("void"), + Token.PrimitiveType.Void, Token.Identifiers.MethodName("AddToNotSureIfGoingUsers"), Token.Punctuation.OpenParen, 
Token.Type("Guid"), Token.Variables.Parameter("id"), Token.Punctuation.Comma, - Token.Type("string"), + Token.PrimitiveType.String, Token.Variables.Parameter("user"), Token.Punctuation.CloseParen, Token.Operators.Arrow, @@ -449,7 +449,7 @@ new void foo2() //Function name not highlighted tokens.should.deep.equal([ Token.Keywords.Modifiers.Private, Token.Keywords.Modifiers.New, - Token.Type("void"), + Token.PrimitiveType.Void, Token.Identifiers.MethodName("foo1"), Token.Punctuation.OpenParen, Token.Punctuation.CloseParen, @@ -458,7 +458,7 @@ new void foo2() //Function name not highlighted Token.Punctuation.OpenBrace, Token.Punctuation.CloseBrace, Token.Keywords.Modifiers.New, - Token.Type("void"), + Token.PrimitiveType.Void, Token.Identifiers.MethodName("foo2"), Token.Punctuation.OpenParen, Token.Punctuation.CloseParen, diff --git a/test/syntaxes/numeric-literals.test.syntax.ts b/test/syntaxes/numeric-literals.test.syntax.ts index 095c44358b..0c5d736aae 100644 --- a/test/syntaxes/numeric-literals.test.syntax.ts +++ b/test/syntaxes/numeric-literals.test.syntax.ts @@ -16,7 +16,7 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("int"), + Token.PrimitiveType.Int, Token.Identifiers.FieldName("x"), Token.Operators.Assignment, Token.Literals.Numeric.Decimal("0"), @@ -29,7 +29,7 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("int"), + Token.PrimitiveType.Int, Token.Identifiers.FieldName("x"), Token.Operators.Assignment, Token.Literals.Numeric.Hexadecimal("0x0"), @@ -42,7 +42,7 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("int"), + Token.PrimitiveType.Int, Token.Identifiers.FieldName("x"), Token.Operators.Assignment, Token.Literals.Numeric.Binary("0b0"), @@ -55,7 +55,7 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("float"), + Token.PrimitiveType.Float, 
Token.Identifiers.FieldName("x"), Token.Operators.Assignment, Token.Literals.Numeric.Decimal("0.0"), diff --git a/test/syntaxes/operators.test.syntax.ts b/test/syntaxes/operators.test.syntax.ts index ae34d71921..b18b9b2d99 100644 --- a/test/syntaxes/operators.test.syntax.ts +++ b/test/syntaxes/operators.test.syntax.ts @@ -18,11 +18,11 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Modifiers.Public, Token.Keywords.Modifiers.Static, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Keywords.Operator, Token.Identifiers.MethodName("+"), Token.Punctuation.OpenParen, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("value"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, @@ -41,11 +41,11 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Modifiers.Public, Token.Keywords.Modifiers.Static, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Keywords.Operator, Token.Identifiers.MethodName("-"), Token.Punctuation.OpenParen, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("value"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, @@ -64,11 +64,11 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Modifiers.Public, Token.Keywords.Modifiers.Static, - Token.Type("bool"), + Token.PrimitiveType.Bool, Token.Keywords.Operator, Token.Identifiers.MethodName("!"), Token.Punctuation.OpenParen, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("value"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, @@ -88,11 +88,11 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Modifiers.Public, Token.Keywords.Modifiers.Static, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Keywords.Operator, Token.Identifiers.MethodName("~"), Token.Punctuation.OpenParen, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("value"), Token.Punctuation.CloseParen, 
Token.Punctuation.OpenBrace, @@ -111,11 +111,11 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Modifiers.Public, Token.Keywords.Modifiers.Static, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Keywords.Operator, Token.Identifiers.MethodName("++"), Token.Punctuation.OpenParen, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("value"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, @@ -134,11 +134,11 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Modifiers.Public, Token.Keywords.Modifiers.Static, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Keywords.Operator, Token.Identifiers.MethodName("--"), Token.Punctuation.OpenParen, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("value"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, @@ -157,11 +157,11 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Modifiers.Public, Token.Keywords.Modifiers.Static, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Keywords.Operator, Token.Identifiers.MethodName("true"), Token.Punctuation.OpenParen, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("value"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, @@ -181,11 +181,11 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Modifiers.Public, Token.Keywords.Modifiers.Static, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Keywords.Operator, Token.Identifiers.MethodName("false"), Token.Punctuation.OpenParen, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("value"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, @@ -205,14 +205,14 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Modifiers.Public, Token.Keywords.Modifiers.Static, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Keywords.Operator, Token.Identifiers.MethodName("+"), 
Token.Punctuation.OpenParen, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("x"), Token.Punctuation.Comma, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, @@ -232,14 +232,14 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Modifiers.Public, Token.Keywords.Modifiers.Static, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Keywords.Operator, Token.Identifiers.MethodName("-"), Token.Punctuation.OpenParen, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("x"), Token.Punctuation.Comma, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, @@ -259,14 +259,14 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Modifiers.Public, Token.Keywords.Modifiers.Static, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Keywords.Operator, Token.Identifiers.MethodName("*"), Token.Punctuation.OpenParen, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("x"), Token.Punctuation.Comma, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, @@ -286,14 +286,14 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Modifiers.Public, Token.Keywords.Modifiers.Static, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Keywords.Operator, Token.Identifiers.MethodName("/"), Token.Punctuation.OpenParen, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("x"), Token.Punctuation.Comma, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, @@ -313,14 +313,14 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Modifiers.Public, Token.Keywords.Modifiers.Static, - 
Token.Type("int"), + Token.PrimitiveType.Int, Token.Keywords.Operator, Token.Identifiers.MethodName("%"), Token.Punctuation.OpenParen, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("x"), Token.Punctuation.Comma, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, @@ -340,14 +340,14 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Modifiers.Public, Token.Keywords.Modifiers.Static, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Keywords.Operator, Token.Identifiers.MethodName("&"), Token.Punctuation.OpenParen, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("x"), Token.Punctuation.Comma, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, @@ -367,14 +367,14 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Modifiers.Public, Token.Keywords.Modifiers.Static, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Keywords.Operator, Token.Identifiers.MethodName("|"), Token.Punctuation.OpenParen, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("x"), Token.Punctuation.Comma, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, @@ -394,14 +394,14 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Modifiers.Public, Token.Keywords.Modifiers.Static, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Keywords.Operator, Token.Identifiers.MethodName("^"), Token.Punctuation.OpenParen, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("x"), Token.Punctuation.Comma, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, @@ -421,14 +421,14 @@ describe("Grammar", () => 
{ tokens.should.deep.equal([ Token.Keywords.Modifiers.Public, Token.Keywords.Modifiers.Static, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Keywords.Operator, Token.Identifiers.MethodName("<<"), Token.Punctuation.OpenParen, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("x"), Token.Punctuation.Comma, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, @@ -448,14 +448,14 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Modifiers.Public, Token.Keywords.Modifiers.Static, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Keywords.Operator, Token.Identifiers.MethodName(">>"), Token.Punctuation.OpenParen, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("x"), Token.Punctuation.Comma, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, @@ -475,14 +475,14 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Modifiers.Public, Token.Keywords.Modifiers.Static, - Token.Type("bool"), + Token.PrimitiveType.Bool, Token.Keywords.Operator, Token.Identifiers.MethodName("=="), Token.Punctuation.OpenParen, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("x"), Token.Punctuation.Comma, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, @@ -502,14 +502,14 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Modifiers.Public, Token.Keywords.Modifiers.Static, - Token.Type("bool"), + Token.PrimitiveType.Bool, Token.Keywords.Operator, Token.Identifiers.MethodName("!="), Token.Punctuation.OpenParen, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("x"), Token.Punctuation.Comma, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("y"), 
Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, @@ -529,14 +529,14 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Modifiers.Public, Token.Keywords.Modifiers.Static, - Token.Type("bool"), + Token.PrimitiveType.Bool, Token.Keywords.Operator, Token.Identifiers.MethodName(">"), Token.Punctuation.OpenParen, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("x"), Token.Punctuation.Comma, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, @@ -556,14 +556,14 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Modifiers.Public, Token.Keywords.Modifiers.Static, - Token.Type("bool"), + Token.PrimitiveType.Bool, Token.Keywords.Operator, Token.Identifiers.MethodName("<"), Token.Punctuation.OpenParen, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("x"), Token.Punctuation.Comma, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, @@ -583,14 +583,14 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Modifiers.Public, Token.Keywords.Modifiers.Static, - Token.Type("bool"), + Token.PrimitiveType.Bool, Token.Keywords.Operator, Token.Identifiers.MethodName(">="), Token.Punctuation.OpenParen, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("x"), Token.Punctuation.Comma, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, @@ -610,14 +610,14 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Modifiers.Public, Token.Keywords.Modifiers.Static, - Token.Type("bool"), + Token.PrimitiveType.Bool, Token.Keywords.Operator, Token.Identifiers.MethodName("<="), Token.Punctuation.OpenParen, - Token.Type("int"), + Token.PrimitiveType.Int, 
Token.Variables.Parameter("x"), Token.Punctuation.Comma, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, @@ -639,9 +639,9 @@ describe("Grammar", () => { Token.Keywords.Modifiers.Static, Token.Keywords.Implicit, Token.Keywords.Operator, - Token.Type("bool"), + Token.PrimitiveType.Bool, Token.Punctuation.OpenParen, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("x"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, @@ -663,9 +663,9 @@ describe("Grammar", () => { Token.Keywords.Modifiers.Static, Token.Keywords.Explicit, Token.Keywords.Operator, - Token.Type("bool"), + Token.PrimitiveType.Bool, Token.Punctuation.OpenParen, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("x"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, @@ -685,11 +685,11 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Modifiers.Public, Token.Keywords.Modifiers.Static, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Keywords.Operator, Token.Identifiers.MethodName("+"), Token.Punctuation.OpenParen, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Variables.Parameter("value"), Token.Punctuation.CloseParen, Token.Operators.Arrow, diff --git a/test/syntaxes/properties.test.syntax.ts b/test/syntaxes/properties.test.syntax.ts index fc299ab901..16f686cb5e 100644 --- a/test/syntaxes/properties.test.syntax.ts +++ b/test/syntaxes/properties.test.syntax.ts @@ -150,7 +150,7 @@ public IBooom Property Token.Keywords.Modifiers.Public, Token.Type("Dictionary"), Token.Punctuation.TypeParameters.Begin, - Token.Type("string"), + Token.PrimitiveType.String, Token.Punctuation.Comma, Token.Type("List"), Token.Punctuation.TypeParameters.Begin, @@ -177,7 +177,7 @@ public IBooom Property Token.Keywords.Modifiers.Public, Token.Type("Dictionary"), Token.Punctuation.TypeParameters.Begin, - Token.Type("string"), + 
Token.PrimitiveType.String, Token.Punctuation.Comma, Token.Type("List"), Token.Punctuation.TypeParameters.Begin, @@ -195,7 +195,7 @@ public IBooom Property Token.Keywords.New, Token.Type("Dictionary"), Token.Punctuation.TypeParameters.Begin, - Token.Type("string"), + Token.PrimitiveType.String, Token.Punctuation.Comma, Token.Type("List"), Token.Punctuation.TypeParameters.Begin, @@ -218,7 +218,7 @@ private bool prop2 => true;`); tokens.should.deep.equal([ Token.Keywords.Modifiers.Private, - Token.Type("string"), + Token.PrimitiveType.String, Token.Identifiers.PropertyName("prop1"), Token.Operators.Arrow, Token.Punctuation.String.Begin, @@ -227,7 +227,7 @@ private bool prop2 => true;`); Token.Punctuation.Semicolon, Token.Keywords.Modifiers.Private, - Token.Type("bool"), + Token.PrimitiveType.Bool, Token.Identifiers.PropertyName("prop2"), Token.Operators.Arrow, Token.Literals.Boolean.True, @@ -240,10 +240,10 @@ private bool prop2 => true;`); const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("string"), + Token.PrimitiveType.String, Token.Type("IFoo"), Token.Punctuation.TypeParameters.Begin, - Token.Type("string"), + Token.PrimitiveType.String, Token.Punctuation.TypeParameters.End, Token.Punctuation.Accessor, Token.Identifiers.PropertyName("Bar"), @@ -261,7 +261,7 @@ private bool prop2 => true;`); const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("string"), + Token.PrimitiveType.String, Token.Identifiers.PropertyName("Bar"), Token.Punctuation.OpenBrace, Token.Keywords.Get, @@ -277,7 +277,7 @@ private bool prop2 => true;`); const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("string"), + Token.PrimitiveType.String, Token.Identifiers.PropertyName("Bar"), Token.Punctuation.OpenBrace, Token.Keywords.Get, @@ -291,7 +291,7 @@ private bool prop2 => true;`); const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("string"), + Token.PrimitiveType.String, Token.Identifiers.PropertyName("Bar"), 
Token.Punctuation.OpenBrace, Token.Keywords.Set, diff --git a/test/syntaxes/string-literals.test.syntax.ts b/test/syntaxes/string-literals.test.syntax.ts index 4c1f9142f7..c28bbbaada 100644 --- a/test/syntaxes/string-literals.test.syntax.ts +++ b/test/syntaxes/string-literals.test.syntax.ts @@ -16,7 +16,7 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("string"), + Token.PrimitiveType.String, Token.Identifiers.FieldName("test"), Token.Operators.Assignment, Token.Punctuation.String.Begin, @@ -31,7 +31,7 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("string"), + Token.PrimitiveType.String, Token.Identifiers.FieldName("test"), Token.Operators.Assignment, Token.Punctuation.String.Begin, @@ -51,7 +51,7 @@ world!";`); const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("string"), + Token.PrimitiveType.String, Token.Identifiers.FieldName("test"), Token.Operators.Assignment, Token.Punctuation.String.Begin, @@ -71,7 +71,7 @@ world!";`); const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("string"), + Token.PrimitiveType.String, Token.Identifiers.FieldName("test"), Token.Operators.Assignment, Token.Punctuation.String.VerbatimBegin, @@ -86,7 +86,7 @@ world!";`); const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("string"), + Token.PrimitiveType.String, Token.Identifiers.FieldName("test"), Token.Operators.Assignment, Token.Punctuation.String.VerbatimBegin, @@ -106,7 +106,7 @@ world!";`); const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("string"), + Token.PrimitiveType.String, Token.Identifiers.FieldName("test"), Token.Operators.Assignment, Token.Punctuation.String.VerbatimBegin, diff --git a/test/syntaxes/type-names.test.syntax.ts b/test/syntaxes/type-names.test.syntax.ts index 0afeb26845..32c2f90124 100644 --- a/test/syntaxes/type-names.test.syntax.ts +++ 
b/test/syntaxes/type-names.test.syntax.ts @@ -16,7 +16,7 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("object"), + Token.PrimitiveType.Object, Token.Identifiers.FieldName("x"), Token.Punctuation.Semicolon]); }); @@ -56,9 +56,9 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Punctuation.OpenParen, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Punctuation.Comma, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Punctuation.CloseParen, Token.Identifiers.FieldName("x"), Token.Punctuation.Semicolon]); @@ -72,7 +72,7 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Type("List"), Token.Punctuation.TypeParameters.Begin, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Punctuation.TypeParameters.End, Token.Identifiers.FieldName("x"), Token.Punctuation.Semicolon]); @@ -87,9 +87,9 @@ describe("Grammar", () => { Token.Type("List"), Token.Punctuation.TypeParameters.Begin, Token.Punctuation.OpenParen, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Punctuation.Comma, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Punctuation.CloseParen, Token.Punctuation.TypeParameters.End, Token.Identifiers.FieldName("x"), @@ -104,9 +104,9 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Type("Dictionary"), Token.Punctuation.TypeParameters.Begin, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Punctuation.Comma, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Punctuation.TypeParameters.End, Token.Identifiers.FieldName("x"), Token.Punctuation.Semicolon]); @@ -126,7 +126,7 @@ describe("Grammar", () => { Token.Punctuation.Accessor, Token.Type("List"), Token.Punctuation.TypeParameters.Begin, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Punctuation.TypeParameters.End, Token.Identifiers.FieldName("x"), Token.Punctuation.Semicolon]); @@ -140,7 +140,7 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Type("List"), 
Token.Punctuation.TypeParameters.Begin, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Punctuation.TypeParameters.End, Token.Punctuation.Accessor, Token.Type("Enumerator"), diff --git a/test/syntaxes/using-directives.test.syntax.ts b/test/syntaxes/using-directives.test.syntax.ts index a87f7f4439..272db55985 100644 --- a/test/syntaxes/using-directives.test.syntax.ts +++ b/test/syntaxes/using-directives.test.syntax.ts @@ -141,7 +141,7 @@ describe("Grammar", () => { Token.Punctuation.TypeParameters.Begin, Token.Comment.MultiLine.Start, Token.Comment.MultiLine.End, - Token.Type("int"), + Token.PrimitiveType.Int, Token.Comment.MultiLine.Start, Token.Comment.MultiLine.End, Token.Punctuation.Comma, @@ -153,7 +153,7 @@ describe("Grammar", () => { Token.Punctuation.TypeParameters.Begin, Token.Comment.MultiLine.Start, Token.Comment.MultiLine.End, - Token.Type("string"), + Token.PrimitiveType.String, Token.Comment.MultiLine.Start, Token.Comment.MultiLine.End, Token.Punctuation.TypeParameters.End, diff --git a/test/syntaxes/using-statements.test.syntax.ts b/test/syntaxes/using-statements.test.syntax.ts index fb884c8140..8b33ee054f 100644 --- a/test/syntaxes/using-statements.test.syntax.ts +++ b/test/syntaxes/using-statements.test.syntax.ts @@ -18,7 +18,7 @@ describe("Grammar", () => { Token.Keywords.Using, Token.Punctuation.OpenParen, Token.Keywords.New, - Token.Type("object"), + Token.PrimitiveType.Object, Token.Punctuation.OpenParen, Token.Punctuation.CloseParen, Token.Punctuation.CloseParen, @@ -37,7 +37,7 @@ describe("Grammar", () => { Token.Keywords.Using, Token.Punctuation.OpenParen, Token.Keywords.New, - Token.Type("object"), + Token.PrimitiveType.Object, Token.Punctuation.OpenParen, Token.Punctuation.CloseParen, Token.Punctuation.CloseParen, @@ -60,7 +60,7 @@ using (new object()) Token.Keywords.Using, Token.Punctuation.OpenParen, Token.Keywords.New, - Token.Type("object"), + Token.PrimitiveType.Object, Token.Punctuation.OpenParen, Token.Punctuation.CloseParen, 
Token.Punctuation.CloseParen, @@ -83,7 +83,7 @@ using (new object()) Token.Keywords.Using, Token.Punctuation.OpenParen, Token.Keywords.New, - Token.Type("object"), + Token.PrimitiveType.Object, Token.Punctuation.OpenParen, Token.Punctuation.CloseParen, Token.Punctuation.CloseParen, @@ -109,7 +109,7 @@ using (var o = new object()) Token.Variables.Local("o"), Token.Operators.Assignment, Token.Keywords.New, - Token.Type("object"), + Token.PrimitiveType.Object, Token.Punctuation.OpenParen, Token.Punctuation.CloseParen, Token.Punctuation.CloseParen, @@ -135,7 +135,7 @@ using (var o = new object()) Token.Variables.Local("o"), Token.Operators.Assignment, Token.Keywords.New, - Token.Type("object"), + Token.PrimitiveType.Object, Token.Punctuation.OpenParen, Token.Punctuation.CloseParen, Token.Punctuation.CloseParen, diff --git a/test/syntaxes/utils/tokenize.ts b/test/syntaxes/utils/tokenize.ts index 861efbf7f6..acedc01822 100644 --- a/test/syntaxes/utils/tokenize.ts +++ b/test/syntaxes/utils/tokenize.ts @@ -340,6 +340,25 @@ export namespace Token { } } + export namespace PrimitiveType { + export const Bool = createToken('bool', 'keyword.type.cs'); + export const Byte = createToken('byte', 'keyword.type.cs'); + export const Char = createToken('char', 'keyword.type.cs'); + export const Decimal = createToken('decimal', 'keyword.type.cs'); + export const Double = createToken('double', 'keyword.type.cs'); + export const Float = createToken('float', 'keyword.type.cs'); + export const Int = createToken('int', 'keyword.type.cs'); + export const Long = createToken('long', 'keyword.type.cs'); + export const Object = createToken('object', 'keyword.type.cs'); + export const SByte = createToken('sbyte', 'keyword.type.cs'); + export const Short = createToken('short', 'keyword.type.cs'); + export const String = createToken('string', 'keyword.type.cs'); + export const UInt = createToken('uint', 'keyword.type.cs'); + export const ULong = createToken('ulong', 'keyword.type.cs'); + export 
const UShort = createToken('ushort', 'keyword.type.cs'); + export const Void = createToken('void', 'keyword.type.cs'); + } + export namespace Punctuation { export const Accessor = createToken('.', 'punctuation.accessor.cs'); export const CloseBrace = createToken('}', 'punctuation.curlybrace.close.cs'); From 80745b98a03006f02d9f4cd02cd4a2bc0279aa6e Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Sun, 8 Jan 2017 06:47:02 -0800 Subject: [PATCH 104/192] Big revamp of type name matching and support added for nullable type names --- syntaxes/csharp.tmLanguage.yml | 512 ++++++++++++++---------- syntaxes/syntax.md | 21 +- test/syntaxes/type-names.test.syntax.ts | 12 + test/syntaxes/utils/tokenize.ts | 27 +- 4 files changed, 341 insertions(+), 231 deletions(-) diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index a6ba0bfad7..cf812a3a15 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -12,7 +12,6 @@ uuid: f7de61e2-bdde-4e2a-a139-8221b179584e # * lambda expressions and anonymous functions # * object creation with object initializer and no parentheses # * compound assignement -# * nullable types # * char literals # * verbatim identifiers # * unsafe code: fixed, sizeof, unsafe blocks, unsafe keyword @@ -249,20 +248,18 @@ repository: match: (?(?:\b(?:delegate)\b))\s+ - (?(?: - (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? + (?:\b(delegate)\b)\s+ + (? (?: - (?:\g(?:\s*\.\s*\g)*) - (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? - (?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)* + (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification + (? # identifier + type arguments (if any) + \g\s* + (?\s*<(?:[^<>]|\g)+>\s*)? + ) + (?:\s*\.\s*\g)* # Are there any more names being dotted into? + (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? + (?:\s*\[,*\]\s*)? # array suffix? )| - (?:\s*\(\s*(?:\g)(?:\s+\g)?(?:\s*,\s*(?:\g)(?:\s+\g)?)*\s*\)\s*)) - (?:(?:\[,*\])*)? 
+ (?\s*\((?:[^\(\)]|\g)+\)) )\s+ (\g)\s* (<([^<>]+)>)?\s* @@ -298,8 +301,11 @@ repository: patterns: - include: '#type' # '3': ? is a sub-expression. It's final value is not considered. - '4': { name: entity.name.type.delegate.cs } - '5': + # '4': ? is a sub-expression. It's final value is not considered. + # '5': ? is a sub-expression. It's final value is not considered. + # '6': ? is a sub-expression. It's final value is not considered. + '7': { name: entity.name.type.delegate.cs } + '8': patterns: - include: '#type-parameter-list' end: (?=;) @@ -309,15 +315,12 @@ repository: - include: '#generic-constraints' enum-declaration: - begin: (?=enum\s+) + begin: (?=\benum\b) end: (?<=\}) patterns: - - include: '#preprocessor' - - include: '#comment' - begin: (?=enum) end: (?=\{) patterns: - # C# grammar: enum identifier - match: (enum)\s+([_[:alpha:]][_[:alnum:]]*) captures: '1': { name: keyword.other.enum.cs } @@ -345,22 +348,22 @@ repository: patterns: - include: '#comment' - include: '#variable-initializer' + - include: '#preprocessor' + - include: '#comment' interface-declaration: - begin: (?=interface\s+) + begin: (?=\binterface\b) end: (?<=\}) patterns: - - include: '#comment' - - begin: (?=interface) + - begin: |- + (?x) + (interface)\b\s+ + ([_[:alpha:]][_[:alnum:]]*) + beginCaptures: + '1': { name: keyword.other.interface.cs } + '2': { name: entity.name.type.interface.cs } end: (?=\{) patterns: - - match: |- - (?x) - (interface)\b\s+ - ([_[:alpha:]][_[:alnum:]]*) - captures: - '1': { name: keyword.other.interface.cs } - '2': { name: entity.name.type.interface.cs } - include: '#type-parameter-list' - include: '#base-types' - include: '#generic-constraints' @@ -372,22 +375,22 @@ repository: '0': { name: punctuation.curlybrace.close.cs } patterns: - include: '#interface-members' + - include: '#preprocessor' + - include: '#comment' struct-declaration: - begin: (?=struct\s+) + begin: (?=\bstruct\b) end: (?<=\}) patterns: - - include: '#comment' - - begin: 
(?=struct) + - begin: |- + (?x) + (struct)\b\s+ + ([_[:alpha:]][_[:alnum:]]*) + beginCaptures: + '1': { name: keyword.other.struct.cs } + '2': { name: entity.name.type.struct.cs } end: (?=\{) patterns: - - match: |- - (?x) - (struct)\s+ - ([_[:alpha:]][_[:alnum:]]*) - captures: - '1': { name: keyword.other.struct.cs } - '2': { name: entity.name.type.struct.cs } - include: '#type-parameter-list' - include: '#base-types' - include: '#generic-constraints' @@ -399,6 +402,8 @@ repository: '0': { name: punctuation.curlybrace.close.cs } patterns: - include: '#struct-members' + - include: '#preprocessor' + - include: '#comment' type-parameter-list: begin: \< @@ -451,25 +456,31 @@ repository: field-declaration: begin: |- (?x) - (?(?: - (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? + (? (?: - (?:\g(?:\s*\.\s*\g)*) - (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? - (?:(?:\*)*)? - (?:(?:\[,*\])*)? - (?:\s*\.\s*\g)* + (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification + (? # identifier + type arguments (if any) + \g\s* + (?\s*<(?:[^<>]|\g)+>\s*)? + ) + (?:\s*\.\s*\g)* # Are there any more names being dotted into? + (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? + (?:\s*\[,*\]\s*)? # array suffix? )| - (?:\s*\(\s*(?:\g)(?:\s+\g)?(?:\s*,\s*(?:\g)(?:\s+\g)?)*\s*\)\s*)) - (?:(?:\[,*\])*)? + (?\s*\((?:[^\(\)]|\g)+\)) )\s+ - (?\g)\s* + (\g)\s* # first field name (?!=>|==)(?=,|;|=) beginCaptures: '1': patterns: - include: '#type' - '2': { name: entity.name.variable.field.cs } + # '2': ? is a sub-expression. It's final value is not considered. + # '3': ? is a sub-expression. It's final value is not considered. + # '4': ? is a sub-expression. It's final value is not considered. + # '5': ? is a sub-expression. It's final value is not considered. + '6': { name: entity.name.variable.field.cs } end: (?=;) patterns: - name: entity.name.variable.field.cs @@ -483,17 +494,19 @@ repository: (?x) (?!.*\b(?:class|interface|struct|enum|event)\b)\s* (? 
- (?(?: - (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? + (? (?: - (?:\g(?:\s*\.\s*\g)*) - (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? - (?:(?:\*)*)? - (?:(?:\[,*\])*)? - (?:\s*\.\s*\g)* + (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification + (? # identifier + type arguments (if any) + \g\s* + (?\s*<(?:[^<>]|\g)+>\s*)? + ) + (?:\s*\.\s*\g)* # Are there any more names being dotted into? + (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? + (?:\s*\[,*\]\s*)? # array suffix? )| - (?:\s*\(\s*(?:\g)(?:\s+\g)?(?:\s*,\s*(?:\g)(?:\s+\g)?)*\s*\)\s*)) - (?:(?:\[,*\])*)? + (?\s*\((?:[^\(\)]|\g)+\)) )\s+ ) (?\g\s*\.\s*)? @@ -505,11 +518,14 @@ repository: - include: '#type' # '2': ? is a sub-expression. It's final value is not considered. # '3': ? is a sub-expression. It's final value is not considered. - '4': + # '4': ? is a sub-expression. It's final value is not considered. + # '5': ? is a sub-expression. It's final value is not considered. + # '6': ? is a sub-expression. It's final value is not considered. + '7': patterns: - include: '#type' - include: '#punctuation-accessor' - '5': { name: entity.name.variable.property.cs } + '8': { name: entity.name.variable.property.cs } end: (?<=\})|(?=;) patterns: - include: '#comment' @@ -521,17 +537,19 @@ repository: begin: |- (?x) (? - (?(?: - (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? + (? (?: - (?:\g(?:\s*\.\s*\g)*) - (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? - (?:(?:\*)*)? - (?:(?:\[,*\])*)? - (?:\s*\.\s*\g)* + (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification + (? # identifier + type arguments (if any) + \g\s* + (?\s*<(?:[^<>]|\g)+>\s*)? + ) + (?:\s*\.\s*\g)* # Are there any more names being dotted into? + (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? + (?:\s*\[,*\]\s*)? # array suffix? )| - (?:\s*\(\s*(?:\g)(?:\s+\g)?(?:\s*,\s*(?:\g)(?:\s+\g)?)*\s*\)\s*)) - (?:(?:\[,*\])*)? + (?\s*\((?:[^\(\)]|\g)+\)) )\s+ ) (?\g\s*\.\s*)? 
@@ -543,11 +561,14 @@ repository: - include: '#type' # '2': ? is a sub-expression. It's final value is not considered. # '3': ? is a sub-expression. It's final value is not considered. - '4': + # '4': ? is a sub-expression. It's final value is not considered. + # '5': ? is a sub-expression. It's final value is not considered. + # '6': ? is a sub-expression. It's final value is not considered. + '7': patterns: - include: '#type' - include: '#punctuation-accessor' - '5': + '8': name: keyword.other.this.cs end: (?<=\})|(?=;) patterns: @@ -560,19 +581,21 @@ repository: event-declaration: begin: |- (?x) - \b(?event)\b\s* + \b(event)\b\s* (? - (?(?: - (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? + (? (?: - (?:\g(?:\s*\.\s*\g)*) - (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? - (?:(?:\*)*)? - (?:(?:\[,*\])*)? - (?:\s*\.\s*\g)* + (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification + (? # identifier + type arguments (if any) + \g\s* + (?\s*<(?:[^<>]|\g)+>\s*)? + ) + (?:\s*\.\s*\g)* # Are there any more names being dotted into? + (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? + (?:\s*\[,*\]\s*)? # array suffix? )| - (?:\s*\(\s*(?:\g)(?:\s+\g)?(?:\s*,\s*(?:\g)(?:\s+\g)?)*\s*\)\s*)) - (?:(?:\[,*\])*)? + (?\s*\((?:[^\(\)]|\g)+\)) )\s+ ) (?\g\s*\.\s*)? @@ -585,11 +608,14 @@ repository: - include: '#type' # '3': ? is a sub-expression. It's final value is not considered. # '4': ? is a sub-expression. It's final value is not considered. - '5': + # '5': ? is a sub-expression. It's final value is not considered. + # '6': ? is a sub-expression. It's final value is not considered. + # '7': ? is a sub-expression. It's final value is not considered. + '8': patterns: - include: '#type' - include: '#punctuation-accessor' - '6': + '9': patterns: - name: entity.name.variable.event.cs match: '[_[:alpha:]][_[:alnum:]]*' @@ -636,17 +662,19 @@ repository: begin: |- (?x) (? - (?(?: - (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? + (? 
(?: - (?:\g(?:\s*\.\s*\g)*) - (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? - (?:(?:\*)*)? - (?:(?:\[,*\])*)? - (?:\s*\.\s*\g)* + (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification + (? # identifier + type arguments (if any) + \g\s* + (?\s*<(?:[^<>]|\g)+>\s*)? + ) + (?:\s*\.\s*\g)* # Are there any more names being dotted into? + (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? + (?:\s*\[,*\]\s*)? # array suffix? )| - (?:\s*\(\s*(?:\g)(?:\s+\g)?(?:\s*,\s*(?:\g)(?:\s+\g)?)*\s*\)\s*)) - (?:(?:\[,*\])*)? + (?\s*\((?:[^\(\)]|\g)+\)) )\s+ ) (?\g\s*\.\s*)? @@ -659,12 +687,15 @@ repository: - include: '#type' # '2': ? is a sub-expression. It's final value is not considered. # '3': ? is a sub-expression. It's final value is not considered. - '4': + # '4': ? is a sub-expression. It's final value is not considered. + # '5': ? is a sub-expression. It's final value is not considered. + # '6': ? is a sub-expression. It's final value is not considered. + '7': patterns: - include: '#type' - include: '#punctuation-accessor' - '5': { name: entity.name.function.cs } - '6': + '8': { name: entity.name.function.cs } + '9': patterns: - include: '#type-parameter-list' end: (?<=\})|(?=;) @@ -712,17 +743,19 @@ repository: operator-declaration: begin: |- (?x) - (?(?: - (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? + (? (?: - (?:\g(?:\s*\.\s*\g)*) - (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? - (?:(?:\*)*)? - (?:(?:\[,*\])*)? - (?:\s*\.\s*\g)* + (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification + (? # identifier + type arguments (if any) + \g\s* + (?\s*<(?:[^<>]|\g)+>\s*)? + ) + (?:\s*\.\s*\g)* # Are there any more names being dotted into? + (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? + (?:\s*\[,*\]\s*)? # array suffix? )| - (?:\s*\(\s*(?:\g)(?:\s+\g)?(?:\s*,\s*(?:\g)(?:\s+\g)?)*\s*\)\s*)) - (?:(?:\[,*\])*)? 
+ (?\s*\((?:[^\(\)]|\g)+\)) )\s* (?(?:\b(?:operator)))\s* (?(?:\+|-|\*|/|%|&|\||\^|\<\<|\>\>|==|!=|\>|\<|\>=|\<=|!|~|\+\+|--|true|false))\s* @@ -732,8 +765,11 @@ repository: patterns: - include: '#type' # '2': ? is a sub-expression. It's final value is not considered. - '3': { name: keyword.other.operator.cs } - '4': { name: entity.name.function.cs } + # '3': ? is a sub-expression. It's final value is not considered. + # '4': ? is a sub-expression. It's final value is not considered. + # '5': ? is a sub-expression. It's final value is not considered. + '6': { name: keyword.other.operator.cs } + '7': { name: entity.name.function.cs } end: (?<=\})|(?=;) patterns: - include: '#comment' @@ -746,17 +782,19 @@ repository: (?x) (?(?:\b(?:explicit|implicit)))\s* (?(?:\b(?:operator)))\s* - (?(?: - (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? + (? (?: - (?:\g(?:\s*\.\s*\g)*) - (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? - (?:(?:\*)*)? - (?:(?:\[,*\])*)? - (?:\s*\.\s*\g)* + (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification + (? # identifier + type arguments (if any) + \g\s* + (?\s*<(?:[^<>]|\g)+>\s*)? + ) + (?:\s*\.\s*\g)* # Are there any more names being dotted into? + (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? + (?:\s*\[,*\]\s*)? # array suffix? )| - (?:\s*\(\s*(?:\g)(?:\s+\g)?(?:\s*,\s*(?:\g)(?:\s+\g)?)*\s*\)\s*)) - (?:(?:\[,*\])*)? + (?\s*\((?:[^\(\)]|\g)+\)) )\s* (?=\() beginCaptures: @@ -994,27 +1032,32 @@ repository: patterns: - match: |- (?x) - (?(?: - (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? + (? (?: - (?:\g(?:\s*\.\s*\g)*) - (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? - (?:(?:\*)*)? - (?:(?:\[,*\])*)? - (?:\s*\.\s*\g)* + (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification + (? # identifier + type arguments (if any) + \g\s* + (?\s*<(?:[^<>]|\g)+>\s*)? + ) + (?:\s*\.\s*\g)* # Are there any more names being dotted into? + (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? + (?:\s*\[,*\]\s*)? 
# array suffix? )| - (?:\s*\(\s*(?:\g)(?:\s+\g)?(?:\s*,\s*(?:\g)(?:\s+\g)?)*\s*\)\s*)) - (?:(?:\[,*\])*)? + (?\s*\((?:[^\(\)]|\g)+\)) )\s+ - (?\g)\s+ - \b(?in)\b + (\g)\s+ + \b(in)\b captures: '1': patterns: - include: '#type' # '2': ? is a sub-expression. It's final value is not considered. - '3': { name: variable.local.cs } - '4': { name: keyword.control.loop.in.cs } + # '3': ? is a sub-expression. It's final value is not considered. + # '4': ? is a sub-expression. It's final value is not considered. + # '5': ? is a sub-expression. It's final value is not considered. + '6': { name: variable.local.cs } + '7': { name: keyword.control.loop.in.cs } - include: '#expression' - include: '#statement' @@ -1051,25 +1094,30 @@ repository: patterns: - match: |- (?x) - (?(?: - (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? + (? (?: - (?:\g(?:\s*\.\s*\g)*) - (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? - (?:(?:\*)*)? - (?:(?:\[,*\])*)? - (?:\s*\.\s*\g)* + (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification + (? # identifier + type arguments (if any) + \g\s* + (?\s*<(?:[^<>]|\g)+>\s*)? + ) + (?:\s*\.\s*\g)* # Are there any more names being dotted into? + (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? + (?:\s*\[,*\]\s*)? # array suffix? )| - (?:\s*\(\s*(?:\g)(?:\s+\g)?(?:\s*,\s*(?:\g)(?:\s+\g)?)*\s*\)\s*)) - (?:(?:\[,*\])*)? - ) - (?:\s+\b(?\g)\b)? + (?\s*\((?:[^\(\)]|\g)+\)) + )\s* + (?:\b(\g)\b)? captures: '1': patterns: - include: '#type' # '2': ? is a sub-expression. It's final value is not considered. - '3': { name: variable.local.cs } + # '3': ? is a sub-expression. It's final value is not considered. + # '4': ? is a sub-expression. It's final value is not considered. + # '5': ? is a sub-expression. It's final value is not considered. 
+ '6': { name: variable.local.cs } - include: '#when-clause' - include: '#block' - include: '#finally-clause' @@ -1141,25 +1189,31 @@ repository: local-variable-declaration: begin: |- (?x) - (?(?: - (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? + (? (?: - (?:\g(?:\s*\.\s*\g)*) - (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? - (?:(?:\*)*)? - (?:(?:\[,*\])*)? - (?:\s*\.\s*\g)* + (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification + (? # identifier + type arguments (if any) + \g\s* + (?\s*<(?:[^<>]|\g)+>\s*)? + ) + (?:\s*\.\s*\g)* # Are there any more names being dotted into? + (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? + (?:\s*\[,*\]\s*)? # array suffix? )| - (?:\s*\(\s*(?:\g)(?:\s+\g)?(?:\s*,\s*(?:\g)(?:\s+\g)?)*\s*\)\s*)) - (?:(?:\[,*\])*)? + (?\s*\((?:[^\(\)]|\g)+\)) )\s+ - (?\g)\s* + (\g)\s* (?=,|;|=|\)) beginCaptures: '1': patterns: - include: '#type' - '2': { name: variable.local.cs } + # '2': ? is a sub-expression. It's final value is not considered. + # '3': ? is a sub-expression. It's final value is not considered. + # '4': ? is a sub-expression. It's final value is not considered. + # '5': ? is a sub-expression. It's final value is not considered. + '6': { name: variable.local.cs } end: (?=;|\)) patterns: - name: variable.local.cs @@ -1172,26 +1226,32 @@ repository: begin: |- (?x) (?\b(?:const)\b)\s* - (?(?: - (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? + (? (?: - (?:\g(?:\s*\.\s*\g)*) - (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? - (?:(?:\*)*)? - (?:(?:\[,*\])*)? - (?:\s*\.\s*\g)* + (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification + (? # identifier + type arguments (if any) + \g\s* + (?\s*<(?:[^<>]|\g)+>\s*)? + ) + (?:\s*\.\s*\g)* # Are there any more names being dotted into? + (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? + (?:\s*\[,*\]\s*)? # array suffix? )| - (?:\s*\(\s*(?:\g)(?:\s+\g)?(?:\s*,\s*(?:\g)(?:\s+\g)?)*\s*\)\s*)) - (?:(?:\[,*\])*)? 
+ (?\s*\((?:[^\(\)]|\g)+\)) )\s+ - (?\g)\s* + (\g)\s* (?=,|;|=) beginCaptures: '1': { name: storage.modifier.cs } '2': patterns: - include: '#type' - '3': { name: variable.local.cs } + # '3': ? is a sub-expression. It's final value is not considered. + # '4': ? is a sub-expression. It's final value is not considered. + # '5': ? is a sub-expression. It's final value is not considered. + # '6': ? is a sub-expression. It's final value is not considered. + '7': { name: variable.local.cs } end: (?=;) patterns: - name: variable.local.cs @@ -1381,17 +1441,19 @@ repository: match: |- (?x) (\()\s* - (?(?: - (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? + (? (?: - (?:\g(?:\s*\.\s*\g)*) - (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? - (?:(?:\*)*)? - (?:(?:\[,*\])*)? - (?:\s*\.\s*\g)* + (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification + (? # identifier + type arguments (if any) + \g\s* + (?\s*<(?:[^<>]|\g)+>\s*)? + ) + (?:\s*\.\s*\g)* # Are there any more names being dotted into? + (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? + (?:\s*\[,*\]\s*)? # array suffix? )| - (?:\s*\(\s*(?:\g)(?:\s+\g)?(?:\s*,\s*(?:\g)(?:\s+\g)?)*\s*\)\s*)) - (?:(?:\[,*\])*)? + (?\s*\((?:[^\(\)]|\g)+\)) )\s* (\))(?=\s*[_[:alnum:]\(]) captures: @@ -1400,7 +1462,10 @@ repository: patterns: - include: '#type' # '3': ? is a sub-expression. It's final value is not considered. - '4': { name: punctuation.parenthesis.close.cs } + # '4': ? is a sub-expression. It's final value is not considered. + # '5': ? is a sub-expression. It's final value is not considered. + # '6': ? is a sub-expression. It's final value is not considered. + '7': { name: punctuation.parenthesis.close.cs } this-or-base-expression: match: \b(?:(base)|(this))\b @@ -1411,16 +1476,16 @@ repository: invocation-expression: begin: |- (?x) - (\.)? # preceding dot - ([_[:alpha:]][_[:alnum:]]*)\s* # method name - (?\s*<([^<>]|\g)+>\s*)?\s* # type parameters - (?=\() # open paren of argument list + (\.)? 
# preceding dot + ([_[:alpha:]][_[:alnum:]]*)\s* # method name + (?\s*<([^<>]|\g)+>\s*)?\s* # type arguments + (?=\() # open paren of argument list beginCaptures: '1': { name: punctuation.accessor.cs } '2': { name: entity.name.function.cs } '3': patterns: - - include: '#type-parameters' + - include: '#type-arguments' end: (?<=\)) patterns: - include: '#argument-list' @@ -1463,7 +1528,7 @@ repository: '2': { name: variable.other.object.cs } '3': patterns: - - include: '#type-parameters' + - include: '#type-arguments' # An identifier with no type parameters (and no dot to the left per the # matches above) should be treated as an object. - match: |- @@ -1477,17 +1542,19 @@ repository: begin: |- (?x) (new)\s+ - (?(?: - (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? + (? (?: - (?:\g(?:\s*\.\s*\g)*) - (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? - (?:(?:\*)*)? - (?:(?:\[,*\])*)? - (?:\s*\.\s*\g)* + (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification + (? # identifier + type arguments (if any) + \g\s* + (?\s*<(?:[^<>]|\g)+>\s*)? + ) + (?:\s*\.\s*\g)* # Are there any more names being dotted into? + (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? + (?:\s*\[,*\]\s*)? # array suffix? )| - (?:\s*\(\s*(?:\g)(?:\s+\g)?(?:\s*,\s*(?:\g)(?:\s+\g)?)*\s*\)\s*)) - (?:(?:\[,*\])*)? + (?\s*\((?:[^\(\)]|\g)+\)) )\s* (?=\() beginCaptures: @@ -1503,17 +1570,19 @@ repository: begin: |- (?x) (new)\s+ - (?(?: - (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? + (? (?: - (?:\g(?:\s*\.\s*\g)*) - (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? - (?:(?:\*)*)? - (?:(?:\[,*\])*)? - (?:\s*\.\s*\g)* + (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification + (? # identifier + type arguments (if any) + \g\s* + (?\s*<(?:[^<>]|\g)+>\s*)? + ) + (?:\s*\.\s*\g)* # Are there any more names being dotted into? + (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? + (?:\s*\[,*\]\s*)? # array suffix? 
)| - (?:\s*\(\s*(?:\g)(?:\s+\g)?(?:\s*,\s*(?:\g)(?:\s+\g)?)*\s*\)\s*)) - (?:(?:\[,*\])*)? + (?\s*\((?:[^\(\)]|\g)+\)) )\s* (?=\[) beginCaptures: @@ -1615,8 +1684,9 @@ repository: - include: '#tuple-type' - include: '#type-builtin' - include: '#type-name' - - include: '#type-parameters' + - include: '#type-arguments' - include: '#type-array-suffix' + - include: '#type-nullable-suffix' tuple-type: patterns: @@ -1633,25 +1703,30 @@ repository: tuple-element: match: |- (?x) - (?(?: - (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? + (? (?: - (?:\g(?:\s*\.\s*\g)*) - (?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)? - (?:(?:\*)*)? - (?:(?:\[,*\])*)? - (?:\s*\.\s*\g)* + (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification + (? # identifier + type arguments (if any) + \g\s* + (?\s*<(?:[^<>]|\g)+>\s*)? + ) + (?:\s*\.\s*\g)* # Are there any more names being dotted into? + (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? + (?:\s*\[,*\]\s*)? # array suffix? )| - (?:\s*\(\s*(?:\g)(?:\s+\g)?(?:\s*,\s*(?:\g)(?:\s+\g)?)*\s*\)\s*)) - (?:(?:\[,*\])*)? + (?\s*\((?:[^\(\)]|\g)+\)) ) - (?:\s+(?\g))? + (?:\b(?\g)\b)? captures: '1': patterns: - include: '#type' # '2': ? is a sub-expression. It's final value is not considered. - '3': { name: entity.name.variable.tuple.cs } + # '3': ? is a sub-expression. It's final value is not considered. + # '4': ? is a sub-expression. It's final value is not considered. + # '5': ? is a sub-expression. It's final value is not considered. + '6': { name: entity.name.variable.tuple.cs } type-builtin: match: \b(bool|byte|char|decimal|double|float|int|long|object|sbyte|short|string|uint|ulong|ushort|void)\b @@ -1675,7 +1750,7 @@ repository: - name: storage.type.cs match: '[_[:alpha:]][_[:alnum:]]*' - type-parameters: + type-arguments: name: meta.type.parameters.cs begin: '<' beginCaptures: @@ -1698,6 +1773,11 @@ repository: patterns: - include: '#punctuation-comma' + type-nullable-suffix: + match: '\?' 
+ captures: + '0': { name: punctuation.separator.question-mark.cs } + operator-assignment: name: keyword.operator.assignment.cs match: (? + (?: + (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification + (? # identifier + type arguments (if any) + \g\s* + (?\s*<(?:[^<>]|\g)+>\s*)? + ) + (?:\s*\.\s*\g)* # Are there any more names being dotted into? + (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? + (?:\s*\[,*\]\s*)? # array suffix? + )| + (?\s*\((?:[^\(\)]|\g)+\)) +) +``` + #### Dotted name * Expression: `([_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)` diff --git a/test/syntaxes/type-names.test.syntax.ts b/test/syntaxes/type-names.test.syntax.ts index 32c2f90124..55c80b2afb 100644 --- a/test/syntaxes/type-names.test.syntax.ts +++ b/test/syntaxes/type-names.test.syntax.ts @@ -147,5 +147,17 @@ describe("Grammar", () => { Token.Identifiers.FieldName("x"), Token.Punctuation.Semicolon]); }); + + it("nullable type - int?", () => { + + const input = Input.InClass(`int? 
x;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.PrimitiveType.Int, + Token.Punctuation.QuestionMark, + Token.Identifiers.FieldName("x"), + Token.Punctuation.Semicolon]); + }); }); }); \ No newline at end of file diff --git a/test/syntaxes/utils/tokenize.ts b/test/syntaxes/utils/tokenize.ts index acedc01822..385996b20f 100644 --- a/test/syntaxes/utils/tokenize.ts +++ b/test/syntaxes/utils/tokenize.ts @@ -360,18 +360,6 @@ export namespace Token { } export namespace Punctuation { - export const Accessor = createToken('.', 'punctuation.accessor.cs'); - export const CloseBrace = createToken('}', 'punctuation.curlybrace.close.cs'); - export const CloseBracket = createToken(']', 'punctuation.squarebracket.close.cs'); - export const CloseParen = createToken(')', 'punctuation.parenthesis.close.cs'); - export const Colon = createToken(':', 'punctuation.separator.colon.cs'); - export const ColonColon = createToken('::', 'punctuation.separator.coloncolon.cs'); - export const Comma = createToken(',', 'punctuation.separator.comma.cs'); - export const Hash = createToken('#', 'punctuation.separator.hash.cs') - export const OpenBrace = createToken('{', 'punctuation.curlybrace.open.cs'); - export const OpenBracket = createToken('[', 'punctuation.squarebracket.open.cs'); - export const OpenParen = createToken('(', 'punctuation.parenthesis.open.cs'); - export namespace Interpolation { export const Begin = createToken('{', 'punctuation.definition.interpolation.begin.cs'); export const End = createToken('}', 'punctuation.definition.interpolation.end.cs'); @@ -383,8 +371,6 @@ export namespace Token { export const VerbatimBegin = createToken('$@"', 'punctuation.definition.string.begin.cs'); } - export const Semicolon = createToken(';', 'punctuation.terminator.statement.cs'); - export namespace String { export const Begin = createToken('"', 'punctuation.definition.string.begin.cs'); export const End = createToken('"', 'punctuation.definition.string.end.cs'); 
@@ -396,6 +382,19 @@ export namespace Token { export const End = createToken('>', 'punctuation.definition.typeparameters.end.cs'); } + export const Accessor = createToken('.', 'punctuation.accessor.cs'); + export const CloseBrace = createToken('}', 'punctuation.curlybrace.close.cs'); + export const CloseBracket = createToken(']', 'punctuation.squarebracket.close.cs'); + export const CloseParen = createToken(')', 'punctuation.parenthesis.close.cs'); + export const Colon = createToken(':', 'punctuation.separator.colon.cs'); + export const ColonColon = createToken('::', 'punctuation.separator.coloncolon.cs'); + export const Comma = createToken(',', 'punctuation.separator.comma.cs'); + export const Hash = createToken('#', 'punctuation.separator.hash.cs') + export const OpenBrace = createToken('{', 'punctuation.curlybrace.open.cs'); + export const OpenBracket = createToken('[', 'punctuation.squarebracket.open.cs'); + export const OpenParen = createToken('(', 'punctuation.parenthesis.open.cs'); + export const QuestionMark = createToken('?', 'punctuation.separator.question-mark.cs'); + export const Semicolon = createToken(';', 'punctuation.terminator.statement.cs'); export const Tilde = createToken('~', 'punctuation.tilde.cs'); } From 3dca527a98194b7857206a17608005d88d9b32d5 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Mon, 9 Jan 2017 06:21:36 -0800 Subject: [PATCH 105/192] Add support for var in local variable declarations and foreach statements --- syntaxes/csharp.tmLanguage.yml | 86 ++++++++++--------- test/syntaxes/cast-expressions.test.syntax.ts | 4 +- test/syntaxes/constructors.test.syntax.ts | 8 +- .../element-access-expressions.test.syntax.ts | 16 ++-- .../invocation-expressions.test.syntax.ts | 8 +- .../iteration-statements.test.syntax.ts | 2 +- test/syntaxes/using-statements.test.syntax.ts | 4 +- test/syntaxes/utils/tokenize.ts | 1 + 8 files changed, 69 insertions(+), 60 deletions(-) diff --git a/syntaxes/csharp.tmLanguage.yml 
b/syntaxes/csharp.tmLanguage.yml index cf812a3a15..5e31773111 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -1032,32 +1032,36 @@ repository: patterns: - match: |- (?x) - (? - (?: - (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification - (? # identifier + type arguments (if any) - \g\s* - (?\s*<(?:[^<>]|\g)+>\s*)? - ) - (?:\s*\.\s*\g)* # Are there any more names being dotted into? - (?:\s*\*\s*)* # pointer suffix? - (?:\s*\?\s*)? # nullable suffix? - (?:\s*\[,*\]\s*)? # array suffix? - )| - (?\s*\((?:[^\(\)]|\g)+\)) + (?: + (\bvar\b)| + (? + (?: + (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification + (? # identifier + type arguments (if any) + \g\s* + (?\s*<(?:[^<>]|\g)+>\s*)? + ) + (?:\s*\.\s*\g)* # Are there any more names being dotted into? + (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? + (?:\s*\[,*\]\s*)? # array suffix? + )| + (?\s*\((?:[^\(\)]|\g)+\)) + ) )\s+ (\g)\s+ \b(in)\b captures: - '1': + '1': { name: keyword.other.var.cs } + '2': patterns: - include: '#type' - # '2': ? is a sub-expression. It's final value is not considered. - # '3': ? is a sub-expression. It's final value is not considered. - # '4': ? is a sub-expression. It's final value is not considered. - # '5': ? is a sub-expression. It's final value is not considered. - '6': { name: variable.local.cs } - '7': { name: keyword.control.loop.in.cs } + # '3': ? is a sub-expression. It's final value is not considered. + # '4': ? is a sub-expression. It's final value is not considered. + # '5': ? is a sub-expression. It's final value is not considered. + # '6': ? is a sub-expression. It's final value is not considered. + '7': { name: variable.local.cs } + '8': { name: keyword.control.loop.in.cs } - include: '#expression' - include: '#statement' @@ -1189,31 +1193,35 @@ repository: local-variable-declaration: begin: |- (?x) - (? - (?: - (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification - (? 
# identifier + type arguments (if any) - \g\s* - (?\s*<(?:[^<>]|\g)+>\s*)? - ) - (?:\s*\.\s*\g)* # Are there any more names being dotted into? - (?:\s*\*\s*)* # pointer suffix? - (?:\s*\?\s*)? # nullable suffix? - (?:\s*\[,*\]\s*)? # array suffix? - )| - (?\s*\((?:[^\(\)]|\g)+\)) + (?: + (\bvar\b)| + (? + (?: + (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification + (? # identifier + type arguments (if any) + \g\s* + (?\s*<(?:[^<>]|\g)+>\s*)? + ) + (?:\s*\.\s*\g)* # Are there any more names being dotted into? + (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? + (?:\s*\[,*\]\s*)? # array suffix? + )| + (?\s*\((?:[^\(\)]|\g)+\)) + ) )\s+ (\g)\s* (?=,|;|=|\)) beginCaptures: - '1': + '1': { name: keyword.other.var.cs } + '2': patterns: - include: '#type' - # '2': ? is a sub-expression. It's final value is not considered. - # '3': ? is a sub-expression. It's final value is not considered. - # '4': ? is a sub-expression. It's final value is not considered. - # '5': ? is a sub-expression. It's final value is not considered. - '6': { name: variable.local.cs } + # '3': ? is a sub-expression. It's final value is not considered. + # '4': ? is a sub-expression. It's final value is not considered. + # '5': ? is a sub-expression. It's final value is not considered. + # '6': ? is a sub-expression. It's final value is not considered. 
+ '7': { name: variable.local.cs } end: (?=;|\)) patterns: - name: variable.local.cs diff --git a/test/syntaxes/cast-expressions.test.syntax.ts b/test/syntaxes/cast-expressions.test.syntax.ts index 1cef5fcbe9..08e3c2b050 100644 --- a/test/syntaxes/cast-expressions.test.syntax.ts +++ b/test/syntaxes/cast-expressions.test.syntax.ts @@ -15,7 +15,7 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("var"), + Token.Keywords.Var, Token.Variables.Local("o"), Token.Operators.Assignment, Token.Punctuation.OpenParen, @@ -31,7 +31,7 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("var"), + Token.Keywords.Var, Token.Variables.Local("o"), Token.Operators.Assignment, Token.Punctuation.OpenParen, diff --git a/test/syntaxes/constructors.test.syntax.ts b/test/syntaxes/constructors.test.syntax.ts index 1878ab4626..eeaf445aa3 100644 --- a/test/syntaxes/constructors.test.syntax.ts +++ b/test/syntaxes/constructors.test.syntax.ts @@ -307,19 +307,19 @@ public class A Token.Punctuation.CloseParen, Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, - Token.Type("var"), + Token.Keywords.Var, Token.Variables.Local("a"), Token.Operators.Assignment, Token.Literals.Numeric.Decimal("1"), Token.Punctuation.Semicolon, - Token.Type("var"), + Token.Keywords.Var, Token.Variables.Local("b"), Token.Operators.Assignment, Token.Punctuation.String.Begin, Token.Literals.String("abc"), Token.Punctuation.String.End, Token.Punctuation.Semicolon, - Token.Type("var"), + Token.Keywords.Var, Token.Variables.Local("c"), Token.Operators.Assignment, Token.Keywords.New, @@ -330,7 +330,7 @@ public class A Token.Punctuation.OpenParen, Token.Punctuation.CloseParen, Token.Punctuation.Semicolon, - Token.Type("var"), + Token.Keywords.Var, Token.Variables.Local("c"), Token.Operators.Assignment, Token.Keywords.New, diff --git a/test/syntaxes/element-access-expressions.test.syntax.ts 
b/test/syntaxes/element-access-expressions.test.syntax.ts index 49ed323122..f901f06cb9 100644 --- a/test/syntaxes/element-access-expressions.test.syntax.ts +++ b/test/syntaxes/element-access-expressions.test.syntax.ts @@ -15,7 +15,7 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("var"), + Token.Keywords.Var, Token.Variables.Local("o"), Token.Operators.Assignment, Token.Variables.Property("P"), @@ -30,7 +30,7 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("var"), + Token.Keywords.Var, Token.Variables.Local("o"), Token.Operators.Assignment, Token.Variables.Property("P"), @@ -46,7 +46,7 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("var"), + Token.Keywords.Var, Token.Variables.Local("o"), Token.Operators.Assignment, Token.Variables.Property("P"), @@ -64,7 +64,7 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("var"), + Token.Keywords.Var, Token.Variables.Local("o"), Token.Operators.Assignment, Token.Variables.Property("P"), @@ -86,7 +86,7 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("var"), + Token.Keywords.Var, Token.Variables.Local("o"), Token.Operators.Assignment, Token.Variables.Property("P"), @@ -103,7 +103,7 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("var"), + Token.Keywords.Var, Token.Variables.Local("o"), Token.Operators.Assignment, Token.Variables.Property("P"), @@ -120,7 +120,7 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("var"), + Token.Keywords.Var, Token.Variables.Local("o"), Token.Operators.Assignment, Token.Variables.Object("C"), @@ -140,7 +140,7 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("var"), + 
Token.Keywords.Var, Token.Variables.Local("o"), Token.Operators.Assignment, Token.Variables.Object("N"), diff --git a/test/syntaxes/invocation-expressions.test.syntax.ts b/test/syntaxes/invocation-expressions.test.syntax.ts index 3e134d4641..4e017d6031 100644 --- a/test/syntaxes/invocation-expressions.test.syntax.ts +++ b/test/syntaxes/invocation-expressions.test.syntax.ts @@ -192,7 +192,7 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("var"), + Token.Keywords.Var, Token.Variables.Local("o"), Token.Operators.Assignment, Token.Variables.Object("N"), @@ -214,7 +214,7 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("var"), + Token.Keywords.Var, Token.Variables.Local("o"), Token.Operators.Assignment, Token.Variables.Object("N"), @@ -233,7 +233,7 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("var"), + Token.Keywords.Var, Token.Variables.Local("o"), Token.Operators.Assignment, Token.Keywords.This, @@ -252,7 +252,7 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("var"), + Token.Keywords.Var, Token.Variables.Local("o"), Token.Operators.Assignment, Token.Identifiers.MethodName("M"), diff --git a/test/syntaxes/iteration-statements.test.syntax.ts b/test/syntaxes/iteration-statements.test.syntax.ts index 0b8e9fe840..8e15067ca0 100644 --- a/test/syntaxes/iteration-statements.test.syntax.ts +++ b/test/syntaxes/iteration-statements.test.syntax.ts @@ -158,7 +158,7 @@ foreach (var s in myList) tokens.should.deep.equal([ Token.Keywords.ForEach, Token.Punctuation.OpenParen, - Token.Type("var"), + Token.Keywords.Var, Token.Variables.Local("s"), Token.Keywords.In, Token.Variables.ReadWrite("myList"), diff --git a/test/syntaxes/using-statements.test.syntax.ts b/test/syntaxes/using-statements.test.syntax.ts index 8b33ee054f..6281826923 100644 --- 
a/test/syntaxes/using-statements.test.syntax.ts +++ b/test/syntaxes/using-statements.test.syntax.ts @@ -105,7 +105,7 @@ using (var o = new object()) tokens.should.deep.equal([ Token.Keywords.Using, Token.Punctuation.OpenParen, - Token.Type("var"), + Token.Keywords.Var, Token.Variables.Local("o"), Token.Operators.Assignment, Token.Keywords.New, @@ -131,7 +131,7 @@ using (var o = new object()) tokens.should.deep.equal([ Token.Keywords.Using, Token.Punctuation.OpenParen, - Token.Type("var"), + Token.Keywords.Var, Token.Variables.Local("o"), Token.Operators.Assignment, Token.Keywords.New, diff --git a/test/syntaxes/utils/tokenize.ts b/test/syntaxes/utils/tokenize.ts index 385996b20f..78109db916 100644 --- a/test/syntaxes/utils/tokenize.ts +++ b/test/syntaxes/utils/tokenize.ts @@ -274,6 +274,7 @@ export namespace Token { export const TypeOf = createToken('typeof', 'keyword.other.typeof.cs'); export const Unchecked = createToken('unchecked', 'keyword.other.unchecked.cs'); export const Using = createToken('using', 'keyword.other.using.cs'); + export const Var = createToken('var', 'keyword.other.var.cs'); export const When = createToken('when', 'keyword.control.try.when.cs'); export const Where = createToken('where', 'keyword.other.where.cs'); export const While = createToken('while', 'keyword.control.loop.while.cs'); From b4e05bf6e8016fba59ad2577b0d9f74d4dba5cab Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Mon, 9 Jan 2017 09:26:16 -0800 Subject: [PATCH 106/192] Add support for query expressions --- syntaxes/csharp.tmLanguage.yml | 186 ++++++++++++++++- test/syntaxes/queries.test.syntax.ts | 296 +++++++++++++++++++++++++++ test/syntaxes/utils/tokenize.ts | 18 ++ 3 files changed, 497 insertions(+), 3 deletions(-) create mode 100644 test/syntaxes/queries.test.syntax.ts diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index 5e31773111..05001fe069 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -8,9 +8,14 
@@ uuid: f7de61e2-bdde-4e2a-a139-8221b179584e # TODO List # # * Refinement and tests to ensure proper highlighting while typing -# * query expressions # * lambda expressions and anonymous functions +# * anonymous object creation expressions # * object creation with object initializer and no parentheses +# * is and as cast expressions +# * null coalescing operator +# * null propagating operator +# * conditional operator +# * default expression # * compound assignement # * char literals # * verbatim identifiers @@ -137,6 +142,7 @@ repository: - include: '#this-or-base-expression' - include: '#expression-operators' - include: '#cast-expression' + - include: '#query-expression' - include: '#object-creation-expression' - include: '#array-creation-expression' - include: '#member-access-expression' @@ -1577,7 +1583,7 @@ repository: array-creation-expression: begin: |- (?x) - (new)\s+ + \b(new)\b\s* (? (?: (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification @@ -1591,7 +1597,7 @@ repository: (?:\s*\[,*\]\s*)? # array suffix? )| (?\s*\((?:[^\(\)]|\g)+\)) - )\s* + )?\s* (?=\[) beginCaptures: '1': { name: keyword.other.new.cs } @@ -1685,6 +1691,180 @@ repository: match: \b(ref|out)\b - include: '#expression' + query-expression: + patterns: + - include: '#from-clause' + - include: '#let-clause' + - include: '#where-clause' + - include: '#join-clause' + - include: '#orderby-clause' + - include: '#select-clause' + - include: '#group-clause' + + from-clause: + begin: |- + (?x) + \b(from)\b\s* + (? + (?: + (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification + (? # identifier + type arguments (if any) + \g\s* + (?\s*<(?:[^<>]|\g)+>\s*)? + ) + (?:\s*\.\s*\g)* # Are there any more names being dotted into? + (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? + (?:\s*\[,*\]\s*)? # array suffix? + )| + (?\s*\((?:[^\(\)]|\g)+\)) + )? 
+ \b(\g)\b\s* + \b(in)\b\s* + beginCaptures: + '1': { name: keyword.query.from.cs } + '2': + patterns: + - include: '#type' + # '3': ? is a sub-expression. It's final value is not considered. + # '4': ? is a sub-expression. It's final value is not considered. + # '5': ? is a sub-expression. It's final value is not considered. + # '6': ? is a sub-expression. It's final value is not considered. + '7': { name: entity.name.variable.range-variable.cs } + '8': { name: keyword.query.in.cs } + end: (?=;) + patterns: + - include: '#expression' + + let-clause: + begin: |- + (?x) + \b(let)\b\s* + \b([_[:alpha:]][_[:alnum:]]*)\b\s* + (=)\s* + beginCaptures: + '1': { name: keyword.query.let.cs } + '2': { name: entity.name.variable.range-variable.cs } + '3': { name: keyword.operator.assignment.cs } + end: (?=;) + patterns: + - include: '#expression' + + where-clause: + begin: |- + (?x) + \b(where)\b\s* + beginCaptures: + '1': { name: keyword.query.where.cs } + end: (?=;) + patterns: + - include: '#expression' + + join-clause: + begin: |- + (?x) + \b(join)\b\s* + (? + (?: + (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification + (? # identifier + type arguments (if any) + \g\s* + (?\s*<(?:[^<>]|\g)+>\s*)? + ) + (?:\s*\.\s*\g)* # Are there any more names being dotted into? + (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? + (?:\s*\[,*\]\s*)? # array suffix? + )| + (?\s*\((?:[^\(\)]|\g)+\)) + )? + \b(\g)\b\s* + \b(in)\b\s* + beginCaptures: + '1': { name: keyword.query.join.cs } + '2': + patterns: + - include: '#type' + # '3': ? is a sub-expression. It's final value is not considered. + # '4': ? is a sub-expression. It's final value is not considered. + # '5': ? is a sub-expression. It's final value is not considered. + # '6': ? is a sub-expression. It's final value is not considered. 
+ '7': { name: entity.name.variable.range-variable.cs } + '8': { name: keyword.query.in.cs } + end: (?=;) + patterns: + - include: '#join-on' + - include: '#join-equals' + - include: '#join-into' + - include: '#expression' + + join-on: + match: \b(on)\b\s* + captures: + '1': { name: keyword.query.on.cs } + + join-equals: + match: \b(equals)\b\s* + captures: + '1': { name: keyword.query.equals.cs } + + join-into: + match: |- + (?x) + \b(into)\b\s* + \b([_[:alpha:]][_[:alnum:]]*)\b\s* + captures: + '1': { name: keyword.query.into.cs } + '2': { name: entity.name.variable.range-variable.cs } + + orderby-clause: + begin: \b(orderby)\b\s* + beginCaptures: + '1': { name: keyword.query.orderby.cs } + end: (?=;) + patterns: + - include: '#ordering-direction' + - include: '#expression' + - include: '#punctuation-comma' + + ordering-direction: + match: \b(?:(ascending)|(descending))\b + captures: + '1': { name: keyword.query.ascending.cs } + '2': { name: keyword.query.descending.cs } + + select-clause: + begin: \b(select)\b\s* + beginCaptures: + '1': { name: keyword.query.select.cs } + end: (?=;) + patterns: + - include: '#expression' + + group-clause: + begin: \b(group)\b\s* + beginCaptures: + '1': { name: keyword.query.group.cs } + end: (?=;) + patterns: + - include: '#group-by' + - include: '#group-into' + - include: '#expression' + + group-by: + match: \b(by)\b\s* + captures: + '1': { name: keyword.query.by.cs } + + group-into: + match: |- + (?x) + \b(into)\b\s* + \b([_[:alpha:]][_[:alnum:]]*)\b\s* + captures: + '1': { name: keyword.query.into.cs } + '2': { name: entity.name.variable.range-variable.cs } + type: name: meta.type.cs patterns: diff --git a/test/syntaxes/queries.test.syntax.ts b/test/syntaxes/queries.test.syntax.ts new file mode 100644 index 0000000000..de8e7ef231 --- /dev/null +++ b/test/syntaxes/queries.test.syntax.ts @@ -0,0 +1,296 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft 
Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. + *--------------------------------------------------------------------------------------------*/ + +import { should } from 'chai'; +import { tokenize, Input, Token } from './utils/tokenize'; + +describe("Grammar", () => { + before(() => should()); + + describe("Query expressions", () => { + it("from clause", () => { + const input = Input.InMethod(`var q = from n in numbers`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Variables.Local("q"), + Token.Operators.Assignment, + Token.Keywords.Queries.From, + Token.Identifiers.RangeVariableName("n"), + Token.Keywords.Queries.In, + Token.Variables.ReadWrite("numbers") + ]); + }); + + it("from clause with type", () => { + const input = Input.InMethod(`var q = from int n in numbers`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Variables.Local("q"), + Token.Operators.Assignment, + Token.Keywords.Queries.From, + Token.PrimitiveType.Int, + Token.Identifiers.RangeVariableName("n"), + Token.Keywords.Queries.In, + Token.Variables.ReadWrite("numbers") + ]); + }); + + it("from clause followed by from clause", () => { + const input = Input.InMethod(` +var q = from x in list1 + from y in list2 +`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Variables.Local("q"), + Token.Operators.Assignment, + Token.Keywords.Queries.From, + Token.Identifiers.RangeVariableName("x"), + Token.Keywords.Queries.In, + Token.Variables.ReadWrite("list1"), + Token.Keywords.Queries.From, + Token.Identifiers.RangeVariableName("y"), + Token.Keywords.Queries.In, + Token.Variables.ReadWrite("list2") + ]); + }); + + it("from clause, join clause", () => { + const input = Input.InMethod(` +var q = from c in customers + join o in orders on c.CustomerID equals o.CustomerID +`); + const 
tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Variables.Local("q"), + Token.Operators.Assignment, + Token.Keywords.Queries.From, + Token.Identifiers.RangeVariableName("c"), + Token.Keywords.Queries.In, + Token.Variables.ReadWrite("customers"), + Token.Keywords.Queries.Join, + Token.Identifiers.RangeVariableName("o"), + Token.Keywords.Queries.In, + Token.Variables.ReadWrite("orders"), + Token.Keywords.Queries.On, + Token.Variables.Object("c"), + Token.Punctuation.Accessor, + Token.Variables.Property("CustomerID"), + Token.Keywords.Queries.Equals, + Token.Variables.Object("o"), + Token.Punctuation.Accessor, + Token.Variables.Property("CustomerID") + ]); + }); + + it("from clause, join-into clause", () => { + const input = Input.InMethod(` +var q = from c in customers + join o in orders on c.CustomerID equals o.CustomerID into co +`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Variables.Local("q"), + Token.Operators.Assignment, + Token.Keywords.Queries.From, + Token.Identifiers.RangeVariableName("c"), + Token.Keywords.Queries.In, + Token.Variables.ReadWrite("customers"), + Token.Keywords.Queries.Join, + Token.Identifiers.RangeVariableName("o"), + Token.Keywords.Queries.In, + Token.Variables.ReadWrite("orders"), + Token.Keywords.Queries.On, + Token.Variables.Object("c"), + Token.Punctuation.Accessor, + Token.Variables.Property("CustomerID"), + Token.Keywords.Queries.Equals, + Token.Variables.Object("o"), + Token.Punctuation.Accessor, + Token.Variables.Property("CustomerID"), + Token.Keywords.Queries.Into, + Token.Identifiers.RangeVariableName("co") + ]); + }); + + it("from clause, orderby", () => { + const input = Input.InMethod(` +var q = from o in orders + orderby o.Customer.Name, o.Total +`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Variables.Local("q"), + Token.Operators.Assignment, + Token.Keywords.Queries.From, + 
Token.Identifiers.RangeVariableName("o"), + Token.Keywords.Queries.In, + Token.Variables.ReadWrite("orders"), + Token.Keywords.Queries.OrderBy, + Token.Variables.Object("o"), + Token.Punctuation.Accessor, + Token.Variables.Property("Customer"), + Token.Punctuation.Accessor, + Token.Variables.Property("Name"), + Token.Punctuation.Comma, + Token.Variables.Object("o"), + Token.Punctuation.Accessor, + Token.Variables.Property("Total") + ]); + }); + + it("from clause, orderby ascending", () => { + const input = Input.InMethod(` +var q = from o in orders + orderby o.Customer.Name ascending, o.Total +`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Variables.Local("q"), + Token.Operators.Assignment, + Token.Keywords.Queries.From, + Token.Identifiers.RangeVariableName("o"), + Token.Keywords.Queries.In, + Token.Variables.ReadWrite("orders"), + Token.Keywords.Queries.OrderBy, + Token.Variables.Object("o"), + Token.Punctuation.Accessor, + Token.Variables.Property("Customer"), + Token.Punctuation.Accessor, + Token.Variables.Property("Name"), + Token.Keywords.Queries.Ascending, + Token.Punctuation.Comma, + Token.Variables.Object("o"), + Token.Punctuation.Accessor, + Token.Variables.Property("Total") + ]); + }); + + it("from clause, orderby descending", () => { + const input = Input.InMethod(` +var q = from o in orders + orderby o.Customer.Name, o.Total descending +`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Variables.Local("q"), + Token.Operators.Assignment, + Token.Keywords.Queries.From, + Token.Identifiers.RangeVariableName("o"), + Token.Keywords.Queries.In, + Token.Variables.ReadWrite("orders"), + Token.Keywords.Queries.OrderBy, + Token.Variables.Object("o"), + Token.Punctuation.Accessor, + Token.Variables.Property("Customer"), + Token.Punctuation.Accessor, + Token.Variables.Property("Name"), + Token.Punctuation.Comma, + Token.Variables.Object("o"), + 
Token.Punctuation.Accessor, + Token.Variables.Property("Total"), + Token.Keywords.Queries.Descending + ]); + }); + + it("from and select", () => { + const input = Input.InMethod(` +var q = from n in numbers + select n;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Variables.Local("q"), + Token.Operators.Assignment, + Token.Keywords.Queries.From, + Token.Identifiers.RangeVariableName("n"), + Token.Keywords.Queries.In, + Token.Variables.ReadWrite("numbers"), + Token.Keywords.Queries.Select, + Token.Variables.ReadWrite("n"), + Token.Punctuation.Semicolon + ]); + }); + + it("from and select with complex expressions", () => { + const input = Input.InMethod(` +var q = from n in new[] { 1, 3, 5, 7, 9 } + select n % 4 * 6;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Variables.Local("q"), + Token.Operators.Assignment, + Token.Keywords.Queries.From, + Token.Identifiers.RangeVariableName("n"), + Token.Keywords.Queries.In, + Token.Keywords.New, + Token.Punctuation.OpenBracket, + Token.Punctuation.CloseBracket, + Token.Punctuation.OpenBrace, + Token.Literals.Numeric.Decimal("1"), + Token.Punctuation.Comma, + Token.Literals.Numeric.Decimal("3"), + Token.Punctuation.Comma, + Token.Literals.Numeric.Decimal("5"), + Token.Punctuation.Comma, + Token.Literals.Numeric.Decimal("7"), + Token.Punctuation.Comma, + Token.Literals.Numeric.Decimal("9"), + Token.Punctuation.CloseBrace, + Token.Keywords.Queries.Select, + Token.Variables.ReadWrite("n"), + Token.Operators.Arithmetic.Remainder, + Token.Literals.Numeric.Decimal("4"), + Token.Operators.Arithmetic.Multiplication, + Token.Literals.Numeric.Decimal("6"), + Token.Punctuation.Semicolon + ]); + }); + + it("from and group by", () => { + const input = Input.InMethod(` +var q = from c in customers + group c by c.Country into g`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + 
Token.Variables.Local("q"), + Token.Operators.Assignment, + Token.Keywords.Queries.From, + Token.Identifiers.RangeVariableName("c"), + Token.Keywords.Queries.In, + Token.Variables.ReadWrite("customers"), + Token.Keywords.Queries.Group, + Token.Variables.ReadWrite("c"), + Token.Keywords.Queries.By, + Token.Variables.Object("c"), + Token.Punctuation.Accessor, + Token.Variables.Property("Country"), + Token.Keywords.Queries.Into, + Token.Identifiers.RangeVariableName("g") + ]); + }); + }); +}); \ No newline at end of file diff --git a/test/syntaxes/utils/tokenize.ts b/test/syntaxes/utils/tokenize.ts index 78109db916..80db455a1e 100644 --- a/test/syntaxes/utils/tokenize.ts +++ b/test/syntaxes/utils/tokenize.ts @@ -186,6 +186,7 @@ export namespace Token { export const NamespaceName = (text: string) => createToken(text, 'entity.name.type.namespace.cs'); export const PreprocessorSymbol = (text: string) => createToken(text, 'entity.name.variable.preprocessor.symbol.cs'); export const PropertyName = (text: string) => createToken(text, 'entity.name.variable.property.cs'); + export const RangeVariableName = (text: string) => createToken(text, 'entity.name.variable.range-variable.cs'); export const StructName = (text: string) => createToken(text, 'entity.name.type.struct.cs'); export const TypeParameterName = (text: string) => createToken(text, 'entity.name.type.type-parameter.cs'); } @@ -231,6 +232,23 @@ export namespace Token { export const Warning = createToken('warning', 'keyword.preprocessor.warning.cs'); } + export namespace Queries { + export const Ascending = createToken('ascending', 'keyword.query.ascending.cs'); + export const By = createToken('by', 'keyword.query.by.cs'); + export const Descending = createToken('descending', 'keyword.query.descending.cs'); + export const Equals = createToken('equals', 'keyword.query.equals.cs'); + export const From = createToken('from', 'keyword.query.from.cs'); + export const Group = createToken('group', 'keyword.query.group.cs'); + 
export const In = createToken('in', 'keyword.query.in.cs'); + export const Into = createToken('into', 'keyword.query.into.cs'); + export const Join = createToken('join', 'keyword.query.join.cs'); + export const Let = createToken('let', 'keyword.query.let.cs'); + export const On = createToken('on', 'keyword.query.on.cs'); + export const OrderBy = createToken('orderby', 'keyword.query.orderby.cs'); + export const Select = createToken('select', 'keyword.query.select.cs'); + export const Where = createToken('where', 'keyword.query.where.cs'); + } + export const Add = createToken('add', 'keyword.other.add.cs'); export const Alias = createToken('alias', 'keyword.other.alias.cs'); export const AttributeSpecifier = (text: string) => createToken(text, 'keyword.other.attribute-specifier.cs'); From 7475dd2d9f7b9c2d9912d7634f64ae8b52f02a09 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Mon, 9 Jan 2017 10:36:58 -0800 Subject: [PATCH 107/192] Add regression test for issue #1106 and fix enough to make it pass --- syntaxes/csharp.tmLanguage.yml | 81 +++++++++- test/syntaxes/char-literals.test.syntax.ts | 54 +++++++ test/syntaxes/queries.test.syntax.ts | 163 ++++++++++++++++++++- test/syntaxes/utils/tokenize.ts | 8 +- 4 files changed, 296 insertions(+), 10 deletions(-) create mode 100644 test/syntaxes/char-literals.test.syntax.ts diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index 05001fe069..895097ad6e 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -10,15 +10,14 @@ uuid: f7de61e2-bdde-4e2a-a139-8221b179584e # * Refinement and tests to ensure proper highlighting while typing # * lambda expressions and anonymous functions # * anonymous object creation expressions -# * object creation with object initializer and no parentheses # * is and as cast expressions # * null coalescing operator # * null propagating operator # * conditional operator # * default expression # * compound assignement -# * char literals # * 
verbatim identifiers +# * hexadecimal and unicode character escape sequences # * unsafe code: fixed, sizeof, unsafe blocks, unsafe keyword # * XML doc comments @@ -143,6 +142,7 @@ repository: - include: '#expression-operators' - include: '#cast-expression' - include: '#query-expression' + - include: '#anonymous-method-expression' - include: '#object-creation-expression' - include: '#array-creation-expression' - include: '#member-access-expression' @@ -1339,6 +1339,7 @@ repository: - include: '#boolean-literal' - include: '#null-literal' - include: '#numeric-literal' + - include: '#char-literal' - include: '#string-literal' - include: '#verbatim-string-literal' @@ -1348,7 +1349,6 @@ repository: match: (? + (?: + (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification + (? # identifier + type arguments (if any) + \g\s* + (?\s*<(?:[^<>]|\g)+>\s*)? + ) + (?:\s*\.\s*\g)* # Are there any more names being dotted into? + (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? + (?:\s*\[,*\]\s*)? # array suffix? + )| + (?\s*\((?:[^\(\)]|\g)+\)) + )\s* + (?=\{|$) + captures: + '1': { name: keyword.other.new.cs } + '2': + patterns: + - include: '#type' + array-creation-expression: begin: |- (?x) @@ -1865,6 +1907,37 @@ repository: '1': { name: keyword.query.into.cs } '2': { name: entity.name.variable.range-variable.cs } + anonymous-method-expression: + patterns: + - begin: |- + (?x) + (?:\b(async)\b\s*)? + \b([_[:alpha:]][_[:alnum:]]*)\b\s* + (=>) + beginCaptures: + '1': { name: storage.modifier.cs } + '2': { name: variable.parameter.cs } + '3': { name: keyword.operator.arrow.cs } + end: (?=\)|;) + patterns: + - include: '#block' + - include: '#expression' + - begin: |- + (?x) + (?:\b(async)\b\s*)? 
+ (\(.*\))\s* + (=>) + beginCaptures: + '1': { name: storage.modifier.cs } + '2': + patterns: + - include: '#parenthesized-parameter-list' + '3': { name: keyword.operator.arrow.cs } + end: (?=\)|;) + patterns: + - include: '#block' + - include: '#expression' + type: name: meta.type.cs patterns: diff --git a/test/syntaxes/char-literals.test.syntax.ts b/test/syntaxes/char-literals.test.syntax.ts new file mode 100644 index 0000000000..073b910e37 --- /dev/null +++ b/test/syntaxes/char-literals.test.syntax.ts @@ -0,0 +1,54 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. + *--------------------------------------------------------------------------------------------*/ + +import { should } from 'chai'; +import { tokenize, Input, Token } from './utils/tokenize'; + +describe("Grammar", () => { + before(() => should()); + + describe("Literals - char", () => { + it("empty", () => { + const input = Input.InMethod(`var x = '';`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Variables.Local("x"), + Token.Operators.Assignment, + Token.Punctuation.Char.Begin, + Token.Punctuation.Char.End, + Token.Punctuation.Semicolon]); + }); + + it("letter", () => { + const input = Input.InMethod(`var x = 'a';`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Variables.Local("x"), + Token.Operators.Assignment, + Token.Punctuation.Char.Begin, + Token.Literals.Char("a"), + Token.Punctuation.Char.End, + Token.Punctuation.Semicolon]); + }); + + it("escaped single quote", () => { + const input = Input.InMethod(`var x = '\\'';`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Variables.Local("x"), + Token.Operators.Assignment, + 
Token.Punctuation.Char.Begin, + Token.Literals.CharacterEscape("\\'"), + Token.Punctuation.Char.End, + Token.Punctuation.Semicolon]); + }); + }); +}); \ No newline at end of file diff --git a/test/syntaxes/queries.test.syntax.ts b/test/syntaxes/queries.test.syntax.ts index de8e7ef231..79f017a937 100644 --- a/test/syntaxes/queries.test.syntax.ts +++ b/test/syntaxes/queries.test.syntax.ts @@ -84,7 +84,7 @@ var q = from c in customers Token.Variables.ReadWrite("orders"), Token.Keywords.Queries.On, Token.Variables.Object("c"), - Token.Punctuation.Accessor, + Token.Punctuation.Accessor, Token.Variables.Property("CustomerID"), Token.Keywords.Queries.Equals, Token.Variables.Object("o"), @@ -114,7 +114,7 @@ var q = from c in customers Token.Variables.ReadWrite("orders"), Token.Keywords.Queries.On, Token.Variables.Object("c"), - Token.Punctuation.Accessor, + Token.Punctuation.Accessor, Token.Variables.Property("CustomerID"), Token.Keywords.Queries.Equals, Token.Variables.Object("o"), @@ -144,7 +144,7 @@ var q = from o in orders Token.Variables.Object("o"), Token.Punctuation.Accessor, Token.Variables.Property("Customer"), - Token.Punctuation.Accessor, + Token.Punctuation.Accessor, Token.Variables.Property("Name"), Token.Punctuation.Comma, Token.Variables.Object("o"), @@ -172,7 +172,7 @@ var q = from o in orders Token.Variables.Object("o"), Token.Punctuation.Accessor, Token.Variables.Property("Customer"), - Token.Punctuation.Accessor, + Token.Punctuation.Accessor, Token.Variables.Property("Name"), Token.Keywords.Queries.Ascending, Token.Punctuation.Comma, @@ -201,7 +201,7 @@ var q = from o in orders Token.Variables.Object("o"), Token.Punctuation.Accessor, Token.Variables.Property("Customer"), - Token.Punctuation.Accessor, + Token.Punctuation.Accessor, Token.Variables.Property("Name"), Token.Punctuation.Comma, Token.Variables.Object("o"), @@ -292,5 +292,158 @@ var q = from c in customers Token.Identifiers.RangeVariableName("g") ]); }); + + it("highlight complex query properly 
(issue #1106)", () => { + const input = Input.InClass(` +private static readonly Parser NodeParser = + from name in NodeName.Token() + from type in NodeValueType.Token() + from eq in Parse.Char('=') + from value in QuotedString.Token() + from lcurl in Parse.Char('{').Token() + from children in Parse.Ref(() => ChildrenNodesParser) + from rcurl in Parse.Char('}').Token() + select new Node + { + Name = name, + Type = type, + Value = value, + Children = children + }; +`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Private, + Token.Keywords.Modifiers.Static, + Token.Keywords.Modifiers.ReadOnly, + Token.Type("Parser"), + Token.Punctuation.TypeParameters.Begin, + Token.Type("Node"), + Token.Punctuation.TypeParameters.End, + Token.Identifiers.FieldName("NodeParser"), + Token.Operators.Assignment, + + // from name in NodeName.Token() + Token.Keywords.Queries.From, + Token.Identifiers.RangeVariableName("name"), + Token.Keywords.Queries.In, + Token.Variables.Object("NodeName"), + Token.Punctuation.Accessor, + Token.Identifiers.MethodName("Token"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + + // from type in NodeValueType.Token() + Token.Keywords.Queries.From, + Token.Identifiers.RangeVariableName("type"), + Token.Keywords.Queries.In, + Token.Variables.Object("NodeValueType"), + Token.Punctuation.Accessor, + Token.Identifiers.MethodName("Token"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + + // from eq in Parse.Char('=') + Token.Keywords.Queries.From, + Token.Identifiers.RangeVariableName("eq"), + Token.Keywords.Queries.In, + Token.Variables.Object("Parse"), + Token.Punctuation.Accessor, + Token.Identifiers.MethodName("Char"), + Token.Punctuation.OpenParen, + Token.Punctuation.Char.Begin, + Token.Literals.Char("="), + Token.Punctuation.Char.End, + Token.Punctuation.CloseParen, + + // from value in QuotedString.Token() + Token.Keywords.Queries.From, + 
Token.Identifiers.RangeVariableName("value"), + Token.Keywords.Queries.In, + Token.Variables.Object("QuotedString"), + Token.Punctuation.Accessor, + Token.Identifiers.MethodName("Token"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + + // from lcurl in Parse.Char('{').Token() + Token.Keywords.Queries.From, + Token.Identifiers.RangeVariableName("lcurl"), + Token.Keywords.Queries.In, + Token.Variables.Object("Parse"), + Token.Punctuation.Accessor, + Token.Identifiers.MethodName("Char"), + Token.Punctuation.OpenParen, + Token.Punctuation.Char.Begin, + Token.Literals.Char("{"), + Token.Punctuation.Char.End, + Token.Punctuation.CloseParen, + Token.Punctuation.Accessor, + Token.Identifiers.MethodName("Token"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + + // from children in Parse.Ref(() => ChildrenNodesParser) + Token.Keywords.Queries.From, + Token.Identifiers.RangeVariableName("children"), + Token.Keywords.Queries.In, + Token.Variables.Object("Parse"), + Token.Punctuation.Accessor, + Token.Identifiers.MethodName("Ref"), + Token.Punctuation.OpenParen, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Operators.Arrow, + Token.Variables.ReadWrite("ChildrenNodesParser"), + Token.Punctuation.CloseParen, + + // from rcurl in Parse.Char('}').Token() + Token.Keywords.Queries.From, + Token.Identifiers.RangeVariableName("rcurl"), + Token.Keywords.Queries.In, + Token.Variables.Object("Parse"), + Token.Punctuation.Accessor, + Token.Identifiers.MethodName("Char"), + Token.Punctuation.OpenParen, + Token.Punctuation.Char.Begin, + Token.Literals.Char("}"), + Token.Punctuation.Char.End, + Token.Punctuation.CloseParen, + Token.Punctuation.Accessor, + Token.Identifiers.MethodName("Token"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + + // select new Node + // { + // Name = name, + // Type = type, + // Value = value, + // Children = children + // }; + Token.Keywords.Queries.Select, + Token.Keywords.New, + 
Token.Type("Node"), + Token.Punctuation.OpenBrace, + Token.Variables.ReadWrite("Name"), + Token.Operators.Assignment, + Token.Variables.ReadWrite("name"), + Token.Punctuation.Comma, + Token.Variables.ReadWrite("Type"), + Token.Operators.Assignment, + Token.Variables.ReadWrite("type"), + Token.Punctuation.Comma, + Token.Variables.ReadWrite("Value"), + Token.Operators.Assignment, + Token.Variables.ReadWrite("value"), + Token.Punctuation.Comma, + Token.Variables.ReadWrite("Children"), + Token.Operators.Assignment, + Token.Variables.ReadWrite("children"), + Token.Punctuation.CloseBrace, + Token.Punctuation.Semicolon + ]); + }); }); }); \ No newline at end of file diff --git a/test/syntaxes/utils/tokenize.ts b/test/syntaxes/utils/tokenize.ts index 80db455a1e..31ee6e243b 100644 --- a/test/syntaxes/utils/tokenize.ts +++ b/test/syntaxes/utils/tokenize.ts @@ -313,6 +313,7 @@ export namespace Token { export const Hexadecimal = (text: string) => createToken(text, 'constant.numeric.hex.cs'); } + export const Char = (text: string) => createToken(text, 'string.quoted.single.cs'); export const CharacterEscape = (text: string) => createToken(text, 'constant.character.escape.cs'); export const String = (text: string) => createToken(text, 'string.quoted.double.cs'); } @@ -379,7 +380,12 @@ export namespace Token { } export namespace Punctuation { - export namespace Interpolation { + export namespace Char { + export const Begin = createToken('\'', 'punctuation.definition.char.begin.cs'); + export const End = createToken('\'', 'punctuation.definition.char.end.cs'); + } + + export namespace Interpolation { export const Begin = createToken('{', 'punctuation.definition.interpolation.begin.cs'); export const End = createToken('}', 'punctuation.definition.interpolation.end.cs'); } From 931498c1e9289a2a8f28b08f0d7b8fe964eadff6 Mon Sep 17 00:00:00 2001 From: Rajkumar Janakiraman Date: Mon, 9 Jan 2017 10:39:32 -0800 Subject: [PATCH 108/192] Rajkumar42/telemetry/installer (#1113) * Sends 
"AcquisitionStart" telemetry to indicate an acquisition started. --- src/main.ts | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/main.ts b/src/main.ts index 9b7d8a57ca..e7c6fd093a 100644 --- a/src/main.ts +++ b/src/main.ts @@ -66,6 +66,9 @@ function installRuntimeDependencies(extension: vscode.Extension, logger: Lo } }; + // Sends "AcquisitionStart" telemetry to indicate an acquisition started. + reporter.sendTelemetryEvent("AcquisitionStart"); + let platformInfo: PlatformInformation; let packageManager: PackageManager; let installationStage = 'touchBeginFile'; From b337d17fccef8b266c516e7e7bbc5e0be3cb7828 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Mon, 9 Jan 2017 11:30:03 -0800 Subject: [PATCH 109/192] Fix array suffixes for jagged arrays and multi-dimensional arrays with spaces --- syntaxes/csharp.tmLanguage.yml | 38 +++++++++++++++++----------------- syntaxes/syntax.md | 2 +- 2 files changed, 20 insertions(+), 20 deletions(-) diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index 895097ad6e..71d518b03e 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -294,7 +294,7 @@ repository: (?:\s*\.\s*\g)* # Are there any more names being dotted into? (?:\s*\*\s*)* # pointer suffix? (?:\s*\?\s*)? # nullable suffix? - (?:\s*\[,*\]\s*)? # array suffix? + (?:\s*\[(?:\s*,\s*)*\]\s*)* # array suffix? )| (?\s*\((?:[^\(\)]|\g)+\)) )\s+ @@ -472,7 +472,7 @@ repository: (?:\s*\.\s*\g)* # Are there any more names being dotted into? (?:\s*\*\s*)* # pointer suffix? (?:\s*\?\s*)? # nullable suffix? - (?:\s*\[,*\]\s*)? # array suffix? + (?:\s*\[(?:\s*,\s*)*\]\s*)* # array suffix? )| (?\s*\((?:[^\(\)]|\g)+\)) )\s+ @@ -510,7 +510,7 @@ repository: (?:\s*\.\s*\g)* # Are there any more names being dotted into? (?:\s*\*\s*)* # pointer suffix? (?:\s*\?\s*)? # nullable suffix? - (?:\s*\[,*\]\s*)? # array suffix? + (?:\s*\[(?:\s*,\s*)*\]\s*)* # array suffix? 
)| (?\s*\((?:[^\(\)]|\g)+\)) )\s+ @@ -553,7 +553,7 @@ repository: (?:\s*\.\s*\g)* # Are there any more names being dotted into? (?:\s*\*\s*)* # pointer suffix? (?:\s*\?\s*)? # nullable suffix? - (?:\s*\[,*\]\s*)? # array suffix? + (?:\s*\[(?:\s*,\s*)*\]\s*)* # array suffix? )| (?\s*\((?:[^\(\)]|\g)+\)) )\s+ @@ -599,7 +599,7 @@ repository: (?:\s*\.\s*\g)* # Are there any more names being dotted into? (?:\s*\*\s*)* # pointer suffix? (?:\s*\?\s*)? # nullable suffix? - (?:\s*\[,*\]\s*)? # array suffix? + (?:\s*\[(?:\s*,\s*)*\]\s*)* # array suffix? )| (?\s*\((?:[^\(\)]|\g)+\)) )\s+ @@ -678,7 +678,7 @@ repository: (?:\s*\.\s*\g)* # Are there any more names being dotted into? (?:\s*\*\s*)* # pointer suffix? (?:\s*\?\s*)? # nullable suffix? - (?:\s*\[,*\]\s*)? # array suffix? + (?:\s*\[(?:\s*,\s*)*\]\s*)* # array suffix? )| (?\s*\((?:[^\(\)]|\g)+\)) )\s+ @@ -759,7 +759,7 @@ repository: (?:\s*\.\s*\g)* # Are there any more names being dotted into? (?:\s*\*\s*)* # pointer suffix? (?:\s*\?\s*)? # nullable suffix? - (?:\s*\[,*\]\s*)? # array suffix? + (?:\s*\[(?:\s*,\s*)*\]\s*)* # array suffix? )| (?\s*\((?:[^\(\)]|\g)+\)) )\s* @@ -798,7 +798,7 @@ repository: (?:\s*\.\s*\g)* # Are there any more names being dotted into? (?:\s*\*\s*)* # pointer suffix? (?:\s*\?\s*)? # nullable suffix? - (?:\s*\[,*\]\s*)? # array suffix? + (?:\s*\[(?:\s*,\s*)*\]\s*)* # array suffix? )| (?\s*\((?:[^\(\)]|\g)+\)) )\s* @@ -1050,7 +1050,7 @@ repository: (?:\s*\.\s*\g)* # Are there any more names being dotted into? (?:\s*\*\s*)* # pointer suffix? (?:\s*\?\s*)? # nullable suffix? - (?:\s*\[,*\]\s*)? # array suffix? + (?:\s*\[(?:\s*,\s*)*\]\s*)* # array suffix? )| (?\s*\((?:[^\(\)]|\g)+\)) ) @@ -1114,7 +1114,7 @@ repository: (?:\s*\.\s*\g)* # Are there any more names being dotted into? (?:\s*\*\s*)* # pointer suffix? (?:\s*\?\s*)? # nullable suffix? - (?:\s*\[,*\]\s*)? # array suffix? + (?:\s*\[(?:\s*,\s*)*\]\s*)* # array suffix? 
)| (?\s*\((?:[^\(\)]|\g)+\)) )\s* @@ -1211,7 +1211,7 @@ repository: (?:\s*\.\s*\g)* # Are there any more names being dotted into? (?:\s*\*\s*)* # pointer suffix? (?:\s*\?\s*)? # nullable suffix? - (?:\s*\[,*\]\s*)? # array suffix? + (?:\s*\[(?:\s*,\s*)*\]\s*)* # array suffix? )| (?\s*\((?:[^\(\)]|\g)+\)) ) @@ -1250,7 +1250,7 @@ repository: (?:\s*\.\s*\g)* # Are there any more names being dotted into? (?:\s*\*\s*)* # pointer suffix? (?:\s*\?\s*)? # nullable suffix? - (?:\s*\[,*\]\s*)? # array suffix? + (?:\s*\[(?:\s*,\s*)*\]\s*)* # array suffix? )| (?\s*\((?:[^\(\)]|\g)+\)) )\s+ @@ -1477,7 +1477,7 @@ repository: (?:\s*\.\s*\g)* # Are there any more names being dotted into? (?:\s*\*\s*)* # pointer suffix? (?:\s*\?\s*)? # nullable suffix? - (?:\s*\[,*\]\s*)? # array suffix? + (?:\s*\[(?:\s*,\s*)*\]\s*)* # array suffix? )| (?\s*\((?:[^\(\)]|\g)+\)) )\s* @@ -1583,7 +1583,7 @@ repository: (?:\s*\.\s*\g)* # Are there any more names being dotted into? (?:\s*\*\s*)* # pointer suffix? (?:\s*\?\s*)? # nullable suffix? - (?:\s*\[,*\]\s*)? # array suffix? + (?:\s*\[(?:\s*,\s*)*\]\s*)* # array suffix? )| (?\s*\((?:[^\(\)]|\g)+\)) )\s* @@ -1611,7 +1611,7 @@ repository: (?:\s*\.\s*\g)* # Are there any more names being dotted into? (?:\s*\*\s*)* # pointer suffix? (?:\s*\?\s*)? # nullable suffix? - (?:\s*\[,*\]\s*)? # array suffix? + (?:\s*\[(?:\s*,\s*)*\]\s*)* # array suffix? )| (?\s*\((?:[^\(\)]|\g)+\)) )\s* @@ -1636,7 +1636,7 @@ repository: (?:\s*\.\s*\g)* # Are there any more names being dotted into? (?:\s*\*\s*)* # pointer suffix? (?:\s*\?\s*)? # nullable suffix? - (?:\s*\[,*\]\s*)? # array suffix? + (?:\s*\[(?:\s*,\s*)*\]\s*)* # array suffix? )| (?\s*\((?:[^\(\)]|\g)+\)) )?\s* @@ -1757,7 +1757,7 @@ repository: (?:\s*\.\s*\g)* # Are there any more names being dotted into? (?:\s*\*\s*)* # pointer suffix? (?:\s*\?\s*)? # nullable suffix? - (?:\s*\[,*\]\s*)? # array suffix? + (?:\s*\[(?:\s*,\s*)*\]\s*)* # array suffix? )| (?\s*\((?:[^\(\)]|\g)+\)) )? 
@@ -1816,7 +1816,7 @@ repository: (?:\s*\.\s*\g)* # Are there any more names being dotted into? (?:\s*\*\s*)* # pointer suffix? (?:\s*\?\s*)? # nullable suffix? - (?:\s*\[,*\]\s*)? # array suffix? + (?:\s*\[(?:\s*,\s*)*\]\s*)* # array suffix? )| (?\s*\((?:[^\(\)]|\g)+\)) )? @@ -1974,7 +1974,7 @@ repository: (?:\s*\.\s*\g)* # Are there any more names being dotted into? (?:\s*\*\s*)* # pointer suffix? (?:\s*\?\s*)? # nullable suffix? - (?:\s*\[,*\]\s*)? # array suffix? + (?:\s*\[(?:\s*,\s*)*\]\s*)* # array suffix? )| (?\s*\((?:[^\(\)]|\g)+\)) ) diff --git a/syntaxes/syntax.md b/syntaxes/syntax.md index 4411fdcdb6..c282c6d541 100644 --- a/syntaxes/syntax.md +++ b/syntaxes/syntax.md @@ -18,7 +18,7 @@ (?:\s*\.\s*\g)* # Are there any more names being dotted into? (?:\s*\*\s*)* # pointer suffix? (?:\s*\?\s*)? # nullable suffix? - (?:\s*\[,*\]\s*)? # array suffix? + (?:\s*\[(?:\s*,\s*)*\]\s*)* # array suffix? )| (?\s*\((?:[^\(\)]|\g)+\)) ) From f4e79892422f1d4b0748ca4b97112c485cf0152e Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Mon, 9 Jan 2017 13:21:28 -0800 Subject: [PATCH 110/192] Fix several bugs --- syntaxes/csharp.tmLanguage.yml | 57 +++++++++++-------- .../syntaxes/checked-unchecked.test.syntax.ts | 48 ++++++++++++++++ .../selection-statements.test.syntax.ts | 47 +++++++++++++++ test/syntaxes/try-statements.test.syntax.ts | 24 ++++++++ 4 files changed, 153 insertions(+), 23 deletions(-) diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index 71d518b03e..815025ef72 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -116,7 +116,7 @@ repository: - include: '#switch-statement' - include: '#goto-statement' - include: '#return-statement' - - include: '#break-continue-statement' + - include: '#break-or-continue-statement' - include: '#throw-statement' - include: '#yield-statement' - include: '#try-statement' @@ -140,14 +140,15 @@ repository: - include: '#literal' - include: '#this-or-base-expression' - 
include: '#expression-operators' - - include: '#cast-expression' - include: '#query-expression' - include: '#anonymous-method-expression' - include: '#object-creation-expression' - include: '#array-creation-expression' + - include: '#anonymous-object-creation-expression' - include: '#member-access-expression' - include: '#invocation-expression' - include: '#element-access-expression' + - include: '#cast-expression' - include: '#parenthesized-expression' - include: '#initializer-expression' - include: '#identifier' @@ -875,7 +876,7 @@ repository: patterns: - include: '#expression' - break-continue-statement: + break-or-continue-statement: match: (? { + const input = ` +class C +{ + void M1() + { + checked + { + checked(++i); + } + } + void M2() { } +} +`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Class, + Token.Identifiers.ClassName("C"), + Token.Punctuation.OpenBrace, + Token.PrimitiveType.Void, + Token.Identifiers.MethodName("M1"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + + Token.Keywords.Checked, + Token.Punctuation.OpenBrace, + Token.Keywords.Checked, + Token.Punctuation.OpenParen, + Token.Operators.Increment, + Token.Variables.ReadWrite("i"), + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace, + + Token.Punctuation.CloseBrace, + Token.PrimitiveType.Void, + Token.Identifiers.MethodName("M2"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseBrace + ]); + }); + }); }); \ No newline at end of file diff --git a/test/syntaxes/selection-statements.test.syntax.ts b/test/syntaxes/selection-statements.test.syntax.ts index 981a81e484..722b96f2c2 100644 --- a/test/syntaxes/selection-statements.test.syntax.ts +++ b/test/syntaxes/selection-statements.test.syntax.ts @@ -11,6 +11,20 @@ describe("Grammar", () => { describe("Selection 
statements", () => { it("single-line if with embedded statement", () => { + const input = Input.InMethod(`if (true) return;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.If, + Token.Punctuation.OpenParen, + Token.Literals.Boolean.True, + Token.Punctuation.CloseParen, + Token.Keywords.Return, + Token.Punctuation.Semicolon + ]); + }); + + it("single-line if with embedded method call", () => { const input = Input.InMethod(`if (true) Do();`); const tokens = tokenize(input); @@ -300,5 +314,38 @@ switch (i) { Token.Punctuation.CloseBrace ]); }); + + it("if statement inside while statment with continue and break", () => { + const input = Input.InMethod(` +while (i < 10) +{ + ++i; + if (true) continue; + break; +}`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.While, + Token.Punctuation.OpenParen, + Token.Variables.ReadWrite("i"), + Token.Operators.Relational.LessThan, + Token.Literals.Numeric.Decimal("10"), + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Operators.Increment, + Token.Variables.ReadWrite("i"), + Token.Punctuation.Semicolon, + Token.Keywords.If, + Token.Punctuation.OpenParen, + Token.Literals.Boolean.True, + Token.Punctuation.CloseParen, + Token.Keywords.Continue, + Token.Punctuation.Semicolon, + Token.Keywords.Break, + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace + ]); + }); }); }); \ No newline at end of file diff --git a/test/syntaxes/try-statements.test.syntax.ts b/test/syntaxes/try-statements.test.syntax.ts index 3889146a29..47fb27e040 100644 --- a/test/syntaxes/try-statements.test.syntax.ts +++ b/test/syntaxes/try-statements.test.syntax.ts @@ -180,5 +180,29 @@ catch (Exception) when (true) Token.Punctuation.CloseBrace ]); }); + + it("try-finally followed by statement", () => { + const input = Input.InMethod(` +try +{ +} +finally +{ +} +int x;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Try, + 
Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Keywords.Finally, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.PrimitiveType.Int, + Token.Variables.Local("x"), + Token.Punctuation.Semicolon + ]); + }); }); }); \ No newline at end of file From 2f93a87523cf90c1bf2777aee909b25a7571eb92 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Mon, 9 Jan 2017 13:27:56 -0800 Subject: [PATCH 111/192] Ensure preprocessor works in enum member lists --- syntaxes/csharp.tmLanguage.yml | 1 + test/syntaxes/preprocessor.test.syntax.ts | 45 +++++++++++++++++++++++ 2 files changed, 46 insertions(+) diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index 815025ef72..b530bef19d 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -345,6 +345,7 @@ repository: endCaptures: '0': { name: punctuation.curlybrace.close.cs } patterns: + - include: '#preprocessor' - include: '#comment' - include: '#attribute-section' - include: '#punctuation-comma' diff --git a/test/syntaxes/preprocessor.test.syntax.ts b/test/syntaxes/preprocessor.test.syntax.ts index 3bd0c03a94..6d148b215c 100644 --- a/test/syntaxes/preprocessor.test.syntax.ts +++ b/test/syntaxes/preprocessor.test.syntax.ts @@ -534,6 +534,51 @@ describe("Grammar", () => { ]); }); + it("preprocessor in enum members", () => { + const input = ` +public enum E +{ + A, + B = A, + C = 2 + A, + +#if DEBUG + D, +#endif + +}`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Public, + Token.Keywords.Enum, + Token.Identifiers.EnumName("E"), + Token.Punctuation.OpenBrace, + Token.Variables.EnumMember("A"), + Token.Punctuation.Comma, + Token.Variables.EnumMember("B"), + Token.Operators.Assignment, + Token.Variables.ReadWrite("A"), + Token.Punctuation.Comma, + Token.Variables.EnumMember("C"), + Token.Operators.Assignment, + Token.Literals.Numeric.Decimal("2"), + Token.Operators.Arithmetic.Addition, + 
Token.Variables.ReadWrite("A"), + Token.Punctuation.Comma, + + Token.Punctuation.Hash, + Token.Keywords.Preprocessor.If, + Token.Identifiers.PreprocessorSymbol("DEBUG"), + Token.Variables.EnumMember("D"), + Token.Punctuation.Comma, + Token.Punctuation.Hash, + Token.Keywords.Preprocessor.EndIf, + + Token.Punctuation.CloseBrace + ]); + }); + it("region name with double-quotes should be highlighted properly (issue #731)", () => { const input = Input.InClass(` #region " Register / Create New " From 3435be20b0609436205f2d7de7b82d9046740261 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Mon, 9 Jan 2017 13:35:49 -0800 Subject: [PATCH 112/192] Refine queries a bit so that clauses are only matched inside a query --- syntaxes/csharp.tmLanguage.yml | 26 ++++++++++++++++---------- 1 file changed, 16 insertions(+), 10 deletions(-) diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index b530bef19d..206c483f52 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -1746,16 +1746,6 @@ repository: - include: '#expression' query-expression: - patterns: - - include: '#from-clause' - - include: '#let-clause' - - include: '#where-clause' - - include: '#join-clause' - - include: '#orderby-clause' - - include: '#select-clause' - - include: '#group-clause' - - from-clause: begin: |- (?x) \b(from)\b\s* @@ -1788,8 +1778,18 @@ repository: '8': { name: keyword.query.in.cs } end: (?=;) patterns: + - include: '#query-body' - include: '#expression' + query-body: + patterns: + - include: '#let-clause' + - include: '#where-clause' + - include: '#join-clause' + - include: '#orderby-clause' + - include: '#select-clause' + - include: '#group-clause' + let-clause: begin: |- (?x) @@ -1802,6 +1802,7 @@ repository: '3': { name: keyword.operator.assignment.cs } end: (?=;) patterns: + - include: '#query-body' - include: '#expression' where-clause: @@ -1812,6 +1813,7 @@ repository: '1': { name: keyword.query.where.cs } end: (?=;) patterns: + - 
include: '#query-body' - include: '#expression' join-clause: @@ -1850,6 +1852,7 @@ repository: - include: '#join-on' - include: '#join-equals' - include: '#join-into' + - include: '#query-body' - include: '#expression' join-on: @@ -1878,6 +1881,7 @@ repository: end: (?=;) patterns: - include: '#ordering-direction' + - include: '#query-body' - include: '#expression' - include: '#punctuation-comma' @@ -1893,6 +1897,7 @@ repository: '1': { name: keyword.query.select.cs } end: (?=;) patterns: + - include: '#query-body' - include: '#expression' group-clause: @@ -1903,6 +1908,7 @@ repository: patterns: - include: '#group-by' - include: '#group-into' + - include: '#query-body' - include: '#expression' group-by: From 5728dfeec603d47a60441412804ffea09f946ab7 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Mon, 9 Jan 2017 13:43:46 -0800 Subject: [PATCH 113/192] Add unsafe storage modifier --- syntaxes/csharp.tmLanguage.yml | 5 ++--- test/syntaxes/classes.test.syntax.ts | 15 +++++++++++++++ test/syntaxes/utils/tokenize.ts | 1 + 3 files changed, 18 insertions(+), 3 deletions(-) diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index 206c483f52..c796fd40a6 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -9,7 +9,6 @@ uuid: f7de61e2-bdde-4e2a-a139-8221b179584e # # * Refinement and tests to ensure proper highlighting while typing # * lambda expressions and anonymous functions -# * anonymous object creation expressions # * is and as cast expressions # * null coalescing operator # * null propagating operator @@ -18,7 +17,7 @@ uuid: f7de61e2-bdde-4e2a-a139-8221b179584e # * compound assignement # * verbatim identifiers # * hexadecimal and unicode character escape sequences -# * unsafe code: fixed, sizeof, unsafe blocks, unsafe keyword +# * unsafe code: fixed, sizeof, unsafe blocks # * XML doc comments patterns: @@ -252,7 +251,7 @@ repository: storage-modifier: name: 'storage.modifier.cs' - match: (? 
{ + const input = Input.InNamespace(` +unsafe class C +{ +}`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Unsafe, + Token.Keywords.Class, + Token.Identifiers.ClassName("C"), + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace]); + }); }); }); \ No newline at end of file diff --git a/test/syntaxes/utils/tokenize.ts b/test/syntaxes/utils/tokenize.ts index 31ee6e243b..e19556ba59 100644 --- a/test/syntaxes/utils/tokenize.ts +++ b/test/syntaxes/utils/tokenize.ts @@ -209,6 +209,7 @@ export namespace Token { export const Ref = createToken('ref', 'storage.modifier.cs'); export const Sealed = createToken('sealed', 'storage.modifier.cs'); export const Static = createToken('static', 'storage.modifier.cs'); + export const Unsafe = createToken('unsafe', 'storage.modifier.cs'); export const Virtual = createToken('virtual', 'storage.modifier.cs'); } From 1a06b43f35d187452a8270850bfe70181be8b2db Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Mon, 9 Jan 2017 14:03:21 -0800 Subject: [PATCH 114/192] Add tests for lambda expression support --- ...nonymous-method-expressions.test.syntax.ts | 348 ++++++++++++++++++ test/syntaxes/utils/tokenize.ts | 1 + 2 files changed, 349 insertions(+) create mode 100644 test/syntaxes/anonymous-method-expressions.test.syntax.ts diff --git a/test/syntaxes/anonymous-method-expressions.test.syntax.ts b/test/syntaxes/anonymous-method-expressions.test.syntax.ts new file mode 100644 index 0000000000..b91acbb594 --- /dev/null +++ b/test/syntaxes/anonymous-method-expressions.test.syntax.ts @@ -0,0 +1,348 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. 
+ *--------------------------------------------------------------------------------------------*/ + +import { should } from 'chai'; +import { tokenize, Input, Token } from './utils/tokenize'; + +describe("Grammar", () => { + before(() => should()); + + describe("Anonymous method expressions", () => { + it("lambda expression with no parameters (assignment)", () => { + const input = Input.InMethod(`Action a = () => { };`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("Action"), + Token.Variables.Local("a"), + Token.Operators.Assignment, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Operators.Arrow, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.Semicolon + ]); + }); + + it("async lambda expression with no parameters (assignment)", () => { + const input = Input.InMethod(`Func a = async () => { };`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("Func"), + Token.Punctuation.TypeParameters.Begin, + Token.Type("Task"), + Token.Punctuation.TypeParameters.End, + Token.Variables.Local("a"), + Token.Operators.Assignment, + Token.Keywords.Modifiers.Async, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Operators.Arrow, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.Semicolon + ]); + }); + + it("lambda expression with single parameter (assignment)", () => { + const input = Input.InMethod(`Action a = x => { };`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("Action"), + Token.Punctuation.TypeParameters.Begin, + Token.PrimitiveType.Int, + Token.Punctuation.TypeParameters.End, + Token.Variables.Local("a"), + Token.Operators.Assignment, + Token.Variables.Parameter("x"), + Token.Operators.Arrow, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.Semicolon + ]); + }); + + it("async lambda expression with single parameter (assignment)", 
() => { + const input = Input.InMethod(`Func a = async x => { };`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("Func"), + Token.Punctuation.TypeParameters.Begin, + Token.PrimitiveType.Int, + Token.Punctuation.Comma, + Token.Type("Task"), + Token.Punctuation.TypeParameters.End, + Token.Variables.Local("a"), + Token.Operators.Assignment, + Token.Keywords.Modifiers.Async, + Token.Variables.Parameter("x"), + Token.Operators.Arrow, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.Semicolon + ]); + }); + + it("lambda expression with single typed parameter (assignment)", () => { + const input = Input.InMethod(`Action a = (int x) => { };`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("Action"), + Token.Punctuation.TypeParameters.Begin, + Token.PrimitiveType.Int, + Token.Punctuation.TypeParameters.End, + Token.Variables.Local("a"), + Token.Operators.Assignment, + Token.Punctuation.OpenParen, + Token.PrimitiveType.Int, + Token.Variables.Parameter("x"), + Token.Punctuation.CloseParen, + Token.Operators.Arrow, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.Semicolon + ]); + }); + + it("async lambda expression with single typed parameter (assignment)", () => { + const input = Input.InMethod(`Func a = async (int x) => { };`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("Func"), + Token.Punctuation.TypeParameters.Begin, + Token.PrimitiveType.Int, + Token.Punctuation.Comma, + Token.Type("Task"), + Token.Punctuation.TypeParameters.End, + Token.Variables.Local("a"), + Token.Operators.Assignment, + Token.Keywords.Modifiers.Async, + Token.Punctuation.OpenParen, + Token.PrimitiveType.Int, + Token.Variables.Parameter("x"), + Token.Punctuation.CloseParen, + Token.Operators.Arrow, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.Semicolon + ]); + }); + + it("lambda expression with 
multiple typed parameters (assignment)", () => { + const input = Input.InMethod(`Action a = (int x, int y) => { };`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("Action"), + Token.Punctuation.TypeParameters.Begin, + Token.PrimitiveType.Int, + Token.Punctuation.Comma, + Token.PrimitiveType.Int, + Token.Punctuation.TypeParameters.End, + Token.Variables.Local("a"), + Token.Operators.Assignment, + Token.Punctuation.OpenParen, + Token.PrimitiveType.Int, + Token.Variables.Parameter("x"), + Token.Punctuation.Comma, + Token.PrimitiveType.Int, + Token.Variables.Parameter("y"), + Token.Punctuation.CloseParen, + Token.Operators.Arrow, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.Semicolon + ]); + }); + + it("async lambda expression with multiple typed parameters (assignment)", () => { + const input = Input.InMethod(`Func a = async (int x, int y) => { };`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("Func"), + Token.Punctuation.TypeParameters.Begin, + Token.PrimitiveType.Int, + Token.Punctuation.Comma, + Token.PrimitiveType.Int, + Token.Punctuation.Comma, + Token.Type("Task"), + Token.Punctuation.TypeParameters.End, + Token.Variables.Local("a"), + Token.Operators.Assignment, + Token.Keywords.Modifiers.Async, + Token.Punctuation.OpenParen, + Token.PrimitiveType.Int, + Token.Variables.Parameter("x"), + Token.Punctuation.Comma, + Token.PrimitiveType.Int, + Token.Variables.Parameter("y"), + Token.Punctuation.CloseParen, + Token.Operators.Arrow, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.Semicolon + ]); + }); + + it("lambda expression with no parameters (passed as argument)", () => { + const input = Input.InMethod(`M(() => { });`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("M"), + Token.Punctuation.OpenParen, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + 
Token.Operators.Arrow, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("async lambda expression with no parameters (passed as argument)", () => { + const input = Input.InMethod(`M(async () => { });`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("M"), + Token.Punctuation.OpenParen, + Token.Keywords.Modifiers.Async, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Operators.Arrow, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("lambda expression with single parameter (passed as argument)", () => { + const input = Input.InMethod(`M(x => { });`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("M"), + Token.Punctuation.OpenParen, + Token.Variables.Parameter("x"), + Token.Operators.Arrow, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("async lambda expression with single parameter (passed as argument)", () => { + const input = Input.InMethod(`M(async x => { });`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("M"), + Token.Punctuation.OpenParen, + Token.Keywords.Modifiers.Async, + Token.Variables.Parameter("x"), + Token.Operators.Arrow, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("lambda expression with single typed parameter (passed as argument)", () => { + const input = Input.InMethod(`M((int x) => { });`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("M"), + Token.Punctuation.OpenParen, + Token.Punctuation.OpenParen, + Token.PrimitiveType.Int, + 
Token.Variables.Parameter("x"), + Token.Punctuation.CloseParen, + Token.Operators.Arrow, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("async lambda expression with single typed parameter (passed as argument)", () => { + const input = Input.InMethod(`M(async (int x) => { });`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("M"), + Token.Punctuation.OpenParen, + Token.Keywords.Modifiers.Async, + Token.Punctuation.OpenParen, + Token.PrimitiveType.Int, + Token.Variables.Parameter("x"), + Token.Punctuation.CloseParen, + Token.Operators.Arrow, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("lambda expression with multiple typed parameters (passed as argument)", () => { + const input = Input.InMethod(`M((int x, int y) => { });`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("M"), + Token.Punctuation.OpenParen, + Token.Punctuation.OpenParen, + Token.PrimitiveType.Int, + Token.Variables.Parameter("x"), + Token.Punctuation.Comma, + Token.PrimitiveType.Int, + Token.Variables.Parameter("y"), + Token.Punctuation.CloseParen, + Token.Operators.Arrow, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("async lambda expression with multiple typed parameters (passed as argument)", () => { + const input = Input.InMethod(`M(async (int x, int y) => { });`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("M"), + Token.Punctuation.OpenParen, + Token.Keywords.Modifiers.Async, + Token.Punctuation.OpenParen, + Token.PrimitiveType.Int, + Token.Variables.Parameter("x"), + Token.Punctuation.Comma, + Token.PrimitiveType.Int, + Token.Variables.Parameter("y"), + 
Token.Punctuation.CloseParen, + Token.Operators.Arrow, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + }); +}); \ No newline at end of file diff --git a/test/syntaxes/utils/tokenize.ts b/test/syntaxes/utils/tokenize.ts index e19556ba59..24cd05ca8d 100644 --- a/test/syntaxes/utils/tokenize.ts +++ b/test/syntaxes/utils/tokenize.ts @@ -194,6 +194,7 @@ export namespace Token { export namespace Keywords { export namespace Modifiers { export const Abstract = createToken('abstract', 'storage.modifier.cs'); + export const Async = createToken('async', 'storage.modifier.cs'); export const Const = createToken('const', 'storage.modifier.cs'); export const In = createToken('in', 'storage.modifier.cs'); export const Internal = createToken('internal', 'storage.modifier.cs'); From c0ca262ff590e2ab1e5b6603a422499266442279 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Mon, 9 Jan 2017 14:21:14 -0800 Subject: [PATCH 115/192] Allow constructor initializers to have line break between : and this/base --- syntaxes/csharp.tmLanguage.yml | 26 ++++++++++------- test/syntaxes/constructors.test.syntax.ts | 35 ++++++++++++++--------- 2 files changed, 37 insertions(+), 24 deletions(-) diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index c796fd40a6..c7f5094b97 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -714,24 +714,30 @@ repository: - include: '#block' constructor-declaration: - begin: ([_[:alpha:]][_[:alnum:]]*)\s*(?=\() - beginCaptures: - '1': { name: entity.name.function.cs } + begin: (?=[_[:alpha:]][_[:alnum:]]*\s*\() end: (?<=\})|(?=;) patterns: - - include: '#comment' + - match: \b([_[:alpha:]][_[:alnum:]]*)\b + captures: + '1': { name: entity.name.function.cs } + - begin: (:) + beginCaptures: + '1': { name: punctuation.separator.colon.cs } + end: (?=\{|=>) + patterns: + - include: '#constructor-initializer' - include: 
'#parenthesized-parameter-list' + - include: '#preprocessor' + - include: '#comment' - include: '#expression-body' - - include: '#constructor-initializer' - include: '#block' constructor-initializer: - begin: (:)\s*\b(?:(base)|(this))\b\s*(?=\() + begin: \b(?:(base)|(this))\b\s*(?=\() beginCaptures: - '1': { name: punctuation.separator.colon.cs } - '2': { name: keyword.other.base.cs } - '3': { name: keyword.other.this.cs } - end: (?=\{|;) + '1': { name: keyword.other.base.cs } + '2': { name: keyword.other.this.cs } + end: (?<=\)) patterns: - include: "#argument-list" diff --git a/test/syntaxes/constructors.test.syntax.ts b/test/syntaxes/constructors.test.syntax.ts index eeaf445aa3..57fc720ce7 100644 --- a/test/syntaxes/constructors.test.syntax.ts +++ b/test/syntaxes/constructors.test.syntax.ts @@ -11,7 +11,6 @@ describe("Grammar", () => { describe("Constructors", () => { it("instance constructor with no parameters", () => { - const input = Input.InClass(`TestClass() { }`); const tokens = tokenize(input); @@ -24,7 +23,6 @@ describe("Grammar", () => { }); it("public instance constructor with no parameters", () => { - const input = Input.InClass(`public TestClass() { }`); const tokens = tokenize(input); @@ -38,7 +36,6 @@ describe("Grammar", () => { }); it("public instance constructor with one parameter", () => { - const input = Input.InClass(`public TestClass(int x) { }`); const tokens = tokenize(input); @@ -54,7 +51,6 @@ describe("Grammar", () => { }); it("public instance constructor with one ref parameter", () => { - const input = Input.InClass(`public TestClass(ref int x) { }`); const tokens = tokenize(input); @@ -71,7 +67,6 @@ describe("Grammar", () => { }); it("instance constructor with two parameters", () => { - const input = Input.InClass(` TestClass(int x, int y) { @@ -92,7 +87,6 @@ TestClass(int x, int y) }); it("instance constructor with expression body", () => { - const input = Input.InClass(`TestClass(int x, int y) => Foo();`); const tokens = 
tokenize(input); @@ -113,7 +107,6 @@ TestClass(int x, int y) }); it("static constructor no parameters", () => { - const input = Input.InClass(`TestClass() { }`); const tokens = tokenize(input); @@ -126,7 +119,6 @@ TestClass(int x, int y) }); it("instance constructor with 'this' initializer", () => { - const input = Input.InClass(`TestClass() : this(42) { }`); const tokens = tokenize(input); @@ -144,7 +136,6 @@ TestClass(int x, int y) }); it("public instance constructor with 'this' initializer", () => { - const input = Input.InClass(`public TestClass() : this(42) { }`); const tokens = tokenize(input); @@ -163,7 +154,6 @@ TestClass(int x, int y) }); it("instance constructor with 'this' initializer with ref parameter", () => { - const input = Input.InClass(`TestClass(int x) : this(ref x) { }`); const tokens = tokenize(input); @@ -184,7 +174,6 @@ TestClass(int x, int y) }); it("instance constructor with 'this' initializer with named parameter", () => { - const input = Input.InClass(`TestClass(int x) : this(y: x) { }`); const tokens = tokenize(input); @@ -206,7 +195,6 @@ TestClass(int x, int y) }); it("instance constructor with 'base' initializer", () => { - const input = Input.InClass(`TestClass() : base(42) { }`); const tokens = tokenize(input); @@ -223,8 +211,28 @@ TestClass(int x, int y) Token.Punctuation.CloseBrace]); }); - it("Open multiline comment in front of parameter highlights properly (issue #861)", () => { + it("instance constructor with 'base' initializer on separate line", () => { + const input = Input.InClass(` +TestClass() : + base(42) +{ +}`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("TestClass"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Colon, + Token.Keywords.Base, + Token.Punctuation.OpenParen, + Token.Literals.Numeric.Decimal("42"), + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace]); + }); + it("Open 
multiline comment in front of parameter highlights properly (issue #861)", () => { const input = Input.InClass(` internal WaitHandle(Task self, TT.Task /*task) { @@ -255,7 +263,6 @@ internal WaitHandle(Task self, TT.Task /*task) }); it("Highlight properly within base constructor initializer (issue #782)", () => { - const input = ` public class A { From 0496580abfbfaeb70633249fb107ce679dc2b76d Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Mon, 9 Jan 2017 15:05:12 -0800 Subject: [PATCH 116/192] Add support for default expressions --- syntaxes/csharp.tmLanguage.yml | 10 +-- test/syntaxes/expressions.test.syntax.ts | 38 +++++++++- test/syntaxes/indexers.test.syntax.ts | 2 +- .../iteration-statements.test.syntax.ts | 24 +++--- test/syntaxes/methods.test.syntax.ts | 2 +- test/syntaxes/operators.test.syntax.ts | 52 ++++++------- test/syntaxes/preprocessor.test.syntax.ts | 2 +- test/syntaxes/properties.test.syntax.ts | 4 +- .../selection-statements.test.syntax.ts | 76 +++++++++---------- test/syntaxes/string-literals.test.syntax.ts | 2 +- test/syntaxes/try-statements.test.syntax.ts | 34 ++++----- test/syntaxes/utils/tokenize.ts | 42 +++++----- test/syntaxes/yield-statements.test.syntax.ts | 8 +- 13 files changed, 167 insertions(+), 129 deletions(-) diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index c7f5094b97..2ca4673bce 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -13,7 +13,6 @@ uuid: f7de61e2-bdde-4e2a-a139-8221b179584e # * null coalescing operator # * null propagating operator # * conditional operator -# * default expression # * compound assignement # * verbatim identifiers # * hexadecimal and unicode character escape sequences @@ -133,7 +132,7 @@ repository: - include: '#preprocessor' - include: '#comment' - include: '#checked-unchecked-expression' - - include: '#typeof-expression' + - include: '#typeof-or-default-expression' - include: '#interpolated-string' - include: 
'#verbatim-interpolated-string' - include: '#literal' @@ -1288,11 +1287,12 @@ repository: patterns: - include: '#expression' - typeof-expression: - begin: (? { describe("Expressions", () => { it("array creation expression passed as argument", () => { - const input = Input.InMethod(`c.abst(ref s, new int[] {1, i, i});`); const tokens = tokenize(input); @@ -40,7 +39,6 @@ describe("Grammar", () => { }); it("arithmetic", () => { - const input = Input.InMethod(`b = this.i != 1 + (2 - 3);`); const tokens = tokenize(input); @@ -61,5 +59,41 @@ describe("Grammar", () => { Token.Punctuation.Semicolon ]); }); + + it("typeof", () => { + const input = Input.InMethod(`var t = typeof(List<>);`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Variables.Local("t"), + Token.Operators.Assignment, + Token.Keywords.TypeOf, + Token.Punctuation.OpenParen, + Token.Type("List"), + Token.Punctuation.TypeParameters.Begin, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("default", () => { + const input = Input.InMethod(`var t = default(List<>);`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Variables.Local("t"), + Token.Operators.Assignment, + Token.Keywords.Default, + Token.Punctuation.OpenParen, + Token.Type("List"), + Token.Punctuation.TypeParameters.Begin, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); }); }); \ No newline at end of file diff --git a/test/syntaxes/indexers.test.syntax.ts b/test/syntaxes/indexers.test.syntax.ts index 8228a6acd7..feb6efce05 100644 --- a/test/syntaxes/indexers.test.syntax.ts +++ b/test/syntaxes/indexers.test.syntax.ts @@ -31,7 +31,7 @@ public string this[int index] Token.Punctuation.OpenBrace, Token.Keywords.Get, Token.Punctuation.OpenBrace, - Token.Keywords.Return, + Token.Keywords.Control.Return, 
Token.Variables.Object("index"), Token.Punctuation.Accessor, Token.Identifiers.MethodName("ToString"), diff --git a/test/syntaxes/iteration-statements.test.syntax.ts b/test/syntaxes/iteration-statements.test.syntax.ts index 8e15067ca0..c028c2abbd 100644 --- a/test/syntaxes/iteration-statements.test.syntax.ts +++ b/test/syntaxes/iteration-statements.test.syntax.ts @@ -15,7 +15,7 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Keywords.While, + Token.Keywords.Control.While, Token.Punctuation.OpenParen, Token.Literals.Boolean.True, Token.Punctuation.CloseParen, @@ -30,10 +30,10 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Keywords.Do, + Token.Keywords.Control.Do, Token.Punctuation.OpenBrace, Token.Punctuation.CloseBrace, - Token.Keywords.While, + Token.Keywords.Control.While, Token.Punctuation.OpenParen, Token.Literals.Boolean.True, Token.Punctuation.CloseParen, @@ -47,7 +47,7 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Keywords.For, + Token.Keywords.Control.For, Token.Punctuation.OpenParen, Token.PrimitiveType.Int, Token.Variables.Local("i"), @@ -76,7 +76,7 @@ for (int i = 0; i < 42; i++) const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Keywords.For, + Token.Keywords.Control.For, Token.Punctuation.OpenParen, Token.PrimitiveType.Int, Token.Variables.Local("i"), @@ -91,7 +91,7 @@ for (int i = 0; i < 42; i++) Token.Operators.Increment, Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, - Token.Keywords.Break, + Token.Keywords.Control.Break, Token.Punctuation.Semicolon, Token.Punctuation.CloseBrace, ]); @@ -107,7 +107,7 @@ for (int i = 0; i < 42; i++) const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Keywords.For, + Token.Keywords.Control.For, Token.Punctuation.OpenParen, Token.PrimitiveType.Int, Token.Variables.Local("i"), @@ -122,7 +122,7 @@ for (int i = 0; i < 42; i++) 
Token.Operators.Increment, Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, - Token.Keywords.Continue, + Token.Keywords.Control.Continue, Token.Punctuation.Semicolon, Token.Punctuation.CloseBrace, ]); @@ -134,11 +134,11 @@ for (int i = 0; i < 42; i++) const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Keywords.ForEach, + Token.Keywords.Control.ForEach, Token.Punctuation.OpenParen, Token.PrimitiveType.Int, Token.Variables.Local("i"), - Token.Keywords.In, + Token.Keywords.Control.In, Token.Variables.ReadWrite("numbers"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, @@ -156,11 +156,11 @@ foreach (var s in myList) const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Keywords.ForEach, + Token.Keywords.Control.ForEach, Token.Punctuation.OpenParen, Token.Keywords.Var, Token.Variables.Local("s"), - Token.Keywords.In, + Token.Keywords.Control.In, Token.Variables.ReadWrite("myList"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, diff --git a/test/syntaxes/methods.test.syntax.ts b/test/syntaxes/methods.test.syntax.ts index 21128b9cf5..b7b5eab7dd 100644 --- a/test/syntaxes/methods.test.syntax.ts +++ b/test/syntaxes/methods.test.syntax.ts @@ -44,7 +44,7 @@ int Add(int x, int y) Token.Variables.Parameter("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, - Token.Keywords.Return, + Token.Keywords.Control.Return, Token.Variables.ReadWrite("x"), Token.Operators.Arithmetic.Addition, Token.Variables.ReadWrite("y"), diff --git a/test/syntaxes/operators.test.syntax.ts b/test/syntaxes/operators.test.syntax.ts index b18b9b2d99..d021fc269b 100644 --- a/test/syntaxes/operators.test.syntax.ts +++ b/test/syntaxes/operators.test.syntax.ts @@ -26,7 +26,7 @@ describe("Grammar", () => { Token.Variables.Parameter("value"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, - Token.Keywords.Return, + Token.Keywords.Control.Return, Token.Operators.Arithmetic.Addition, Token.Variables.ReadWrite("value"), 
Token.Punctuation.Semicolon, @@ -49,7 +49,7 @@ describe("Grammar", () => { Token.Variables.Parameter("value"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, - Token.Keywords.Return, + Token.Keywords.Control.Return, Token.Operators.Arithmetic.Subtraction, Token.Variables.ReadWrite("value"), Token.Punctuation.Semicolon, @@ -72,7 +72,7 @@ describe("Grammar", () => { Token.Variables.Parameter("value"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, - Token.Keywords.Return, + Token.Keywords.Control.Return, Token.Variables.ReadWrite("value"), Token.Operators.Relational.Equals, Token.Literals.Numeric.Decimal("0"), @@ -96,7 +96,7 @@ describe("Grammar", () => { Token.Variables.Parameter("value"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, - Token.Keywords.Return, + Token.Keywords.Control.Return, Token.Operators.Bitwise.BitwiseComplement, Token.Variables.ReadWrite("value"), Token.Punctuation.Semicolon, @@ -119,7 +119,7 @@ describe("Grammar", () => { Token.Variables.Parameter("value"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, - Token.Keywords.Return, + Token.Keywords.Control.Return, Token.Operators.Increment, Token.Variables.ReadWrite("value"), Token.Punctuation.Semicolon, @@ -142,7 +142,7 @@ describe("Grammar", () => { Token.Variables.Parameter("value"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, - Token.Keywords.Return, + Token.Keywords.Control.Return, Token.Operators.Decrement, Token.Variables.ReadWrite("value"), Token.Punctuation.Semicolon, @@ -165,7 +165,7 @@ describe("Grammar", () => { Token.Variables.Parameter("value"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, - Token.Keywords.Return, + Token.Keywords.Control.Return, Token.Variables.ReadWrite("value"), Token.Operators.Relational.NotEqual, Token.Literals.Numeric.Decimal("0"), @@ -189,7 +189,7 @@ describe("Grammar", () => { Token.Variables.Parameter("value"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, - 
Token.Keywords.Return, + Token.Keywords.Control.Return, Token.Variables.ReadWrite("value"), Token.Operators.Relational.Equals, Token.Literals.Numeric.Decimal("0"), @@ -216,7 +216,7 @@ describe("Grammar", () => { Token.Variables.Parameter("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, - Token.Keywords.Return, + Token.Keywords.Control.Return, Token.Variables.ReadWrite("x"), Token.Operators.Arithmetic.Addition, Token.Variables.ReadWrite("y"), @@ -243,7 +243,7 @@ describe("Grammar", () => { Token.Variables.Parameter("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, - Token.Keywords.Return, + Token.Keywords.Control.Return, Token.Variables.ReadWrite("x"), Token.Operators.Arithmetic.Subtraction, Token.Variables.ReadWrite("y"), @@ -270,7 +270,7 @@ describe("Grammar", () => { Token.Variables.Parameter("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, - Token.Keywords.Return, + Token.Keywords.Control.Return, Token.Variables.ReadWrite("x"), Token.Operators.Arithmetic.Multiplication, Token.Variables.ReadWrite("y"), @@ -297,7 +297,7 @@ describe("Grammar", () => { Token.Variables.Parameter("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, - Token.Keywords.Return, + Token.Keywords.Control.Return, Token.Variables.ReadWrite("x"), Token.Operators.Arithmetic.Division, Token.Variables.ReadWrite("y"), @@ -324,7 +324,7 @@ describe("Grammar", () => { Token.Variables.Parameter("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, - Token.Keywords.Return, + Token.Keywords.Control.Return, Token.Variables.ReadWrite("x"), Token.Operators.Arithmetic.Remainder, Token.Variables.ReadWrite("y"), @@ -351,7 +351,7 @@ describe("Grammar", () => { Token.Variables.Parameter("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, - Token.Keywords.Return, + Token.Keywords.Control.Return, Token.Variables.ReadWrite("x"), Token.Operators.Bitwise.And, Token.Variables.ReadWrite("y"), @@ -378,7 +378,7 @@ describe("Grammar", () => 
{ Token.Variables.Parameter("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, - Token.Keywords.Return, + Token.Keywords.Control.Return, Token.Variables.ReadWrite("x"), Token.Operators.Bitwise.Or, Token.Variables.ReadWrite("y"), @@ -405,7 +405,7 @@ describe("Grammar", () => { Token.Variables.Parameter("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, - Token.Keywords.Return, + Token.Keywords.Control.Return, Token.Variables.ReadWrite("x"), Token.Operators.Bitwise.ExclusiveOr, Token.Variables.ReadWrite("y"), @@ -432,7 +432,7 @@ describe("Grammar", () => { Token.Variables.Parameter("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, - Token.Keywords.Return, + Token.Keywords.Control.Return, Token.Variables.ReadWrite("x"), Token.Operators.Bitwise.ShiftLeft, Token.Variables.ReadWrite("y"), @@ -459,7 +459,7 @@ describe("Grammar", () => { Token.Variables.Parameter("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, - Token.Keywords.Return, + Token.Keywords.Control.Return, Token.Variables.ReadWrite("x"), Token.Operators.Bitwise.ShiftRight, Token.Variables.ReadWrite("y"), @@ -486,7 +486,7 @@ describe("Grammar", () => { Token.Variables.Parameter("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, - Token.Keywords.Return, + Token.Keywords.Control.Return, Token.Variables.ReadWrite("x"), Token.Operators.Relational.Equals, Token.Variables.ReadWrite("y"), @@ -513,7 +513,7 @@ describe("Grammar", () => { Token.Variables.Parameter("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, - Token.Keywords.Return, + Token.Keywords.Control.Return, Token.Variables.ReadWrite("x"), Token.Operators.Relational.NotEqual, Token.Variables.ReadWrite("y"), @@ -540,7 +540,7 @@ describe("Grammar", () => { Token.Variables.Parameter("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, - Token.Keywords.Return, + Token.Keywords.Control.Return, Token.Variables.ReadWrite("x"), Token.Operators.Relational.GreaterThan, 
Token.Variables.ReadWrite("y"), @@ -567,7 +567,7 @@ describe("Grammar", () => { Token.Variables.Parameter("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, - Token.Keywords.Return, + Token.Keywords.Control.Return, Token.Variables.ReadWrite("x"), Token.Operators.Relational.LessThan, Token.Variables.ReadWrite("y"), @@ -594,7 +594,7 @@ describe("Grammar", () => { Token.Variables.Parameter("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, - Token.Keywords.Return, + Token.Keywords.Control.Return, Token.Variables.ReadWrite("x"), Token.Operators.Relational.GreaterThanOrEqual, Token.Variables.ReadWrite("y"), @@ -621,7 +621,7 @@ describe("Grammar", () => { Token.Variables.Parameter("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, - Token.Keywords.Return, + Token.Keywords.Control.Return, Token.Variables.ReadWrite("x"), Token.Operators.Relational.LessThanOrEqual, Token.Variables.ReadWrite("y"), @@ -645,7 +645,7 @@ describe("Grammar", () => { Token.Variables.Parameter("x"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, - Token.Keywords.Return, + Token.Keywords.Control.Return, Token.Variables.ReadWrite("x"), Token.Operators.Relational.NotEqual, Token.Literals.Numeric.Decimal("0"), @@ -669,7 +669,7 @@ describe("Grammar", () => { Token.Variables.Parameter("x"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, - Token.Keywords.Return, + Token.Keywords.Control.Return, Token.Variables.ReadWrite("x"), Token.Operators.Relational.NotEqual, Token.Literals.Numeric.Decimal("0"), diff --git a/test/syntaxes/preprocessor.test.syntax.ts b/test/syntaxes/preprocessor.test.syntax.ts index 6d148b215c..76121c71a3 100644 --- a/test/syntaxes/preprocessor.test.syntax.ts +++ b/test/syntaxes/preprocessor.test.syntax.ts @@ -621,7 +621,7 @@ public ActionResult Register() Token.Punctuation.String.End, Token.Punctuation.CloseParen, Token.Punctuation.Semicolon, - Token.Keywords.Return, + Token.Keywords.Control.Return, 
Token.Identifiers.MethodName("View"), Token.Punctuation.OpenParen, Token.Punctuation.CloseParen, diff --git a/test/syntaxes/properties.test.syntax.ts b/test/syntaxes/properties.test.syntax.ts index 16f686cb5e..81439ca5ea 100644 --- a/test/syntaxes/properties.test.syntax.ts +++ b/test/syntaxes/properties.test.syntax.ts @@ -27,7 +27,7 @@ public IBooom Property Token.Punctuation.OpenBrace, Token.Keywords.Get, Token.Punctuation.OpenBrace, - Token.Keywords.Return, + Token.Keywords.Control.Return, Token.Literals.Null, Token.Punctuation.Semicolon, Token.Punctuation.CloseBrace, @@ -53,7 +53,7 @@ public IBooom Property Token.Punctuation.OpenBrace, Token.Keywords.Get, Token.Punctuation.OpenBrace, - Token.Keywords.Return, + Token.Keywords.Control.Return, Token.Literals.Null, Token.Punctuation.Semicolon, Token.Punctuation.CloseBrace, diff --git a/test/syntaxes/selection-statements.test.syntax.ts b/test/syntaxes/selection-statements.test.syntax.ts index 722b96f2c2..f77380f7db 100644 --- a/test/syntaxes/selection-statements.test.syntax.ts +++ b/test/syntaxes/selection-statements.test.syntax.ts @@ -15,11 +15,11 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Keywords.If, + Token.Keywords.Control.If, Token.Punctuation.OpenParen, Token.Literals.Boolean.True, Token.Punctuation.CloseParen, - Token.Keywords.Return, + Token.Keywords.Control.Return, Token.Punctuation.Semicolon ]); }); @@ -29,7 +29,7 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Keywords.If, + Token.Keywords.Control.If, Token.Punctuation.OpenParen, Token.Literals.Boolean.True, Token.Punctuation.CloseParen, @@ -45,7 +45,7 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Keywords.If, + Token.Keywords.Control.If, Token.Punctuation.OpenParen, Token.Literals.Boolean.True, Token.Punctuation.CloseParen, @@ -66,7 +66,7 @@ if (true) const tokens = tokenize(input); 
tokens.should.deep.equal([ - Token.Keywords.If, + Token.Keywords.Control.If, Token.Punctuation.OpenParen, Token.Literals.Boolean.True, Token.Punctuation.CloseParen, @@ -86,7 +86,7 @@ if (true) const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Keywords.If, + Token.Keywords.Control.If, Token.Punctuation.OpenParen, Token.Literals.Boolean.True, Token.Punctuation.CloseParen, @@ -109,7 +109,7 @@ else const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Keywords.If, + Token.Keywords.Control.If, Token.Punctuation.OpenParen, Token.Literals.Boolean.True, Token.Punctuation.CloseParen, @@ -117,7 +117,7 @@ else Token.Punctuation.OpenParen, Token.Punctuation.CloseParen, Token.Punctuation.Semicolon, - Token.Keywords.Else, + Token.Keywords.Control.Else, Token.Identifiers.MethodName("Dont"), Token.Punctuation.OpenParen, Token.Punctuation.CloseParen, @@ -138,7 +138,7 @@ else const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Keywords.If, + Token.Keywords.Control.If, Token.Punctuation.OpenParen, Token.Literals.Boolean.True, Token.Punctuation.CloseParen, @@ -148,7 +148,7 @@ else Token.Punctuation.CloseParen, Token.Punctuation.Semicolon, Token.Punctuation.CloseBrace, - Token.Keywords.Else, + Token.Keywords.Control.Else, Token.Punctuation.OpenBrace, Token.Identifiers.MethodName("Dont"), Token.Punctuation.OpenParen, @@ -168,7 +168,7 @@ else if (false) const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Keywords.If, + Token.Keywords.Control.If, Token.Punctuation.OpenParen, Token.Literals.Boolean.True, Token.Punctuation.CloseParen, @@ -176,8 +176,8 @@ else if (false) Token.Punctuation.OpenParen, Token.Punctuation.CloseParen, Token.Punctuation.Semicolon, - Token.Keywords.Else, - Token.Keywords.If, + Token.Keywords.Control.Else, + Token.Keywords.Control.If, Token.Punctuation.OpenParen, Token.Literals.Boolean.False, Token.Punctuation.CloseParen, @@ -201,7 +201,7 @@ else if (false) const tokens = tokenize(input); 
tokens.should.deep.equal([ - Token.Keywords.If, + Token.Keywords.Control.If, Token.Punctuation.OpenParen, Token.Literals.Boolean.True, Token.Punctuation.CloseParen, @@ -211,8 +211,8 @@ else if (false) Token.Punctuation.CloseParen, Token.Punctuation.Semicolon, Token.Punctuation.CloseBrace, - Token.Keywords.Else, - Token.Keywords.If, + Token.Keywords.Control.Else, + Token.Keywords.Control.If, Token.Punctuation.OpenParen, Token.Literals.Boolean.False, Token.Punctuation.CloseParen, @@ -238,27 +238,27 @@ default: const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Keywords.Switch, + Token.Keywords.Control.Switch, Token.Punctuation.OpenParen, Token.Variables.ReadWrite("i"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, - Token.Keywords.Case, + Token.Keywords.Control.Case, Token.Literals.Numeric.Decimal("0"), Token.Punctuation.Colon, - Token.Keywords.Goto, - Token.Keywords.Case, + Token.Keywords.Control.Goto, + Token.Keywords.Control.Case, Token.Literals.Numeric.Decimal("1"), Token.Punctuation.Semicolon, - Token.Keywords.Case, + Token.Keywords.Control.Case, Token.Literals.Numeric.Decimal("1"), Token.Punctuation.Colon, - Token.Keywords.Goto, - Token.Keywords.Default, + Token.Keywords.Control.Goto, + Token.Keywords.Control.Default, Token.Punctuation.Semicolon, - Token.Keywords.Default, + Token.Keywords.Control.Default, Token.Punctuation.Colon, - Token.Keywords.Break, + Token.Keywords.Control.Break, Token.Punctuation.Semicolon, Token.Punctuation.CloseBrace ]); @@ -283,32 +283,32 @@ switch (i) { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Keywords.Switch, + Token.Keywords.Control.Switch, Token.Punctuation.OpenParen, Token.Variables.ReadWrite("i"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, - Token.Keywords.Case, + Token.Keywords.Control.Case, Token.Literals.Numeric.Decimal("0"), Token.Punctuation.Colon, Token.Punctuation.OpenBrace, - Token.Keywords.Goto, - Token.Keywords.Case, + Token.Keywords.Control.Goto, + 
Token.Keywords.Control.Case, Token.Literals.Numeric.Decimal("1"), Token.Punctuation.Semicolon, Token.Punctuation.CloseBrace, - Token.Keywords.Case, + Token.Keywords.Control.Case, Token.Literals.Numeric.Decimal("1"), Token.Punctuation.Colon, Token.Punctuation.OpenBrace, - Token.Keywords.Goto, - Token.Keywords.Default, + Token.Keywords.Control.Goto, + Token.Keywords.Control.Default, Token.Punctuation.Semicolon, Token.Punctuation.CloseBrace, - Token.Keywords.Default, + Token.Keywords.Control.Default, Token.Punctuation.Colon, Token.Punctuation.OpenBrace, - Token.Keywords.Break, + Token.Keywords.Control.Break, Token.Punctuation.Semicolon, Token.Punctuation.CloseBrace, Token.Punctuation.CloseBrace @@ -326,7 +326,7 @@ while (i < 10) const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Keywords.While, + Token.Keywords.Control.While, Token.Punctuation.OpenParen, Token.Variables.ReadWrite("i"), Token.Operators.Relational.LessThan, @@ -336,13 +336,13 @@ while (i < 10) Token.Operators.Increment, Token.Variables.ReadWrite("i"), Token.Punctuation.Semicolon, - Token.Keywords.If, + Token.Keywords.Control.If, Token.Punctuation.OpenParen, Token.Literals.Boolean.True, Token.Punctuation.CloseParen, - Token.Keywords.Continue, + Token.Keywords.Control.Continue, Token.Punctuation.Semicolon, - Token.Keywords.Break, + Token.Keywords.Control.Break, Token.Punctuation.Semicolon, Token.Punctuation.CloseBrace ]); diff --git a/test/syntaxes/string-literals.test.syntax.ts b/test/syntaxes/string-literals.test.syntax.ts index c28bbbaada..3b3b73bad1 100644 --- a/test/syntaxes/string-literals.test.syntax.ts +++ b/test/syntaxes/string-literals.test.syntax.ts @@ -163,7 +163,7 @@ throw new InvalidCastException( const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Keywords.Throw, + Token.Keywords.Control.Throw, Token.Keywords.New, Token.Type("InvalidCastException"), Token.Punctuation.OpenParen, diff --git a/test/syntaxes/try-statements.test.syntax.ts 
b/test/syntaxes/try-statements.test.syntax.ts index 47fb27e040..e14c739650 100644 --- a/test/syntaxes/try-statements.test.syntax.ts +++ b/test/syntaxes/try-statements.test.syntax.ts @@ -21,7 +21,7 @@ finally const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Keywords.Try, + Token.Keywords.Control.Try, Token.Punctuation.OpenBrace, Token.Punctuation.CloseBrace, Token.Keywords.Finally, @@ -41,10 +41,10 @@ catch const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Keywords.Try, + Token.Keywords.Control.Try, Token.Punctuation.OpenBrace, Token.Punctuation.CloseBrace, - Token.Keywords.Catch, + Token.Keywords.Control.Catch, Token.Punctuation.OpenBrace, Token.Punctuation.CloseBrace ]); @@ -64,10 +64,10 @@ finally const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Keywords.Try, + Token.Keywords.Control.Try, Token.Punctuation.OpenBrace, Token.Punctuation.CloseBrace, - Token.Keywords.Catch, + Token.Keywords.Control.Catch, Token.Punctuation.OpenBrace, Token.Punctuation.CloseBrace, Token.Keywords.Finally, @@ -87,10 +87,10 @@ catch (Exception) const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Keywords.Try, + Token.Keywords.Control.Try, Token.Punctuation.OpenBrace, Token.Punctuation.CloseBrace, - Token.Keywords.Catch, + Token.Keywords.Control.Catch, Token.Punctuation.OpenParen, Token.Type("Exception"), Token.Punctuation.CloseParen, @@ -110,10 +110,10 @@ catch (Exception ex) const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Keywords.Try, + Token.Keywords.Control.Try, Token.Punctuation.OpenBrace, Token.Punctuation.CloseBrace, - Token.Keywords.Catch, + Token.Keywords.Control.Catch, Token.Punctuation.OpenParen, Token.Type("Exception"), Token.Variables.Local("ex"), @@ -135,17 +135,17 @@ catch when (true) const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Keywords.Try, + Token.Keywords.Control.Try, Token.Punctuation.OpenBrace, - Token.Keywords.Throw, + Token.Keywords.Control.Throw, 
Token.Keywords.New, Token.Type("Exception"), Token.Punctuation.OpenParen, Token.Punctuation.CloseParen, Token.Punctuation.Semicolon, Token.Punctuation.CloseBrace, - Token.Keywords.Catch, - Token.Keywords.When, + Token.Keywords.Control.Catch, + Token.Keywords.Control.When, Token.Punctuation.OpenParen, Token.Literals.Boolean.True, Token.Punctuation.CloseParen, @@ -165,14 +165,14 @@ catch (Exception) when (true) const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Keywords.Try, + Token.Keywords.Control.Try, Token.Punctuation.OpenBrace, Token.Punctuation.CloseBrace, - Token.Keywords.Catch, + Token.Keywords.Control.Catch, Token.Punctuation.OpenParen, Token.Type("Exception"), Token.Punctuation.CloseParen, - Token.Keywords.When, + Token.Keywords.Control.When, Token.Punctuation.OpenParen, Token.Literals.Boolean.True, Token.Punctuation.CloseParen, @@ -193,7 +193,7 @@ int x;`); const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Keywords.Try, + Token.Keywords.Control.Try, Token.Punctuation.OpenBrace, Token.Punctuation.CloseBrace, Token.Keywords.Finally, diff --git a/test/syntaxes/utils/tokenize.ts b/test/syntaxes/utils/tokenize.ts index 24cd05ca8d..dda3539896 100644 --- a/test/syntaxes/utils/tokenize.ts +++ b/test/syntaxes/utils/tokenize.ts @@ -192,6 +192,28 @@ export namespace Token { } export namespace Keywords { + export namespace Control { + export const Break = createToken('break', 'keyword.control.flow.break.cs'); + export const Case = createToken('case', 'keyword.control.case.cs'); + export const Catch = createToken('catch', 'keyword.control.try.catch.cs'); + export const Continue = createToken('continue', 'keyword.control.flow.continue.cs'); + export const Default = createToken('default', 'keyword.control.default.cs'); + export const Do = createToken('do', 'keyword.control.loop.do.cs'); + export const Else = createToken('else', 'keyword.control.conditional.else.cs'); + export const For = createToken('for', 'keyword.control.loop.for.cs'); 
+ export const ForEach = createToken('foreach', 'keyword.control.loop.foreach.cs'); + export const Goto = createToken('goto', 'keyword.control.goto.cs'); + export const If = createToken('if', 'keyword.control.conditional.if.cs'); + export const In = createToken('in', 'keyword.control.loop.in.cs'); + export const Return = createToken('return', 'keyword.control.flow.return.cs'); + export const Switch = createToken('switch', 'keyword.control.switch.cs'); + export const Throw = createToken('throw', 'keyword.control.flow.throw.cs'); + export const Try = createToken('try', 'keyword.control.try.cs'); + export const When = createToken('when', 'keyword.control.try.when.cs'); + export const While = createToken('while', 'keyword.control.loop.while.cs'); + export const Yield = createToken('yield', 'keyword.control.flow.yield.cs'); + } + export namespace Modifiers { export const Abstract = createToken('abstract', 'storage.modifier.cs'); export const Async = createToken('async', 'storage.modifier.cs'); @@ -255,50 +277,32 @@ export namespace Token { export const Alias = createToken('alias', 'keyword.other.alias.cs'); export const AttributeSpecifier = (text: string) => createToken(text, 'keyword.other.attribute-specifier.cs'); export const Base = createToken('base', 'keyword.other.base.cs'); - export const Break = createToken('break', 'keyword.control.flow.break.cs'); - export const Case = createToken('case', 'keyword.control.case.cs'); - export const Catch = createToken('catch', 'keyword.control.try.catch.cs'); export const Checked = createToken('checked', 'keyword.other.checked.cs'); export const Class = createToken('class', 'keyword.other.class.cs'); - export const Continue = createToken('continue', 'keyword.control.flow.continue.cs'); - export const Default = createToken('default', 'keyword.control.default.cs'); + export const Default = createToken('default', 'keyword.other.default.cs'); export const Delegate = createToken('delegate', 'keyword.other.delegate.cs'); - export 
const Do = createToken('do', 'keyword.control.loop.do.cs'); - export const Else = createToken('else', 'keyword.control.conditional.else.cs'); export const Enum = createToken('enum', 'keyword.other.enum.cs'); export const Event = createToken('event', 'keyword.other.event.cs'); export const Explicit = createToken('explicit', 'keyword.other.explicit.cs'); export const Extern = createToken('extern', 'keyword.other.extern.cs'); export const Finally = createToken('finally', 'keyword.control.try.finally.cs'); - export const For = createToken('for', 'keyword.control.loop.for.cs'); - export const ForEach = createToken('foreach', 'keyword.control.loop.foreach.cs'); export const Get = createToken('get', 'keyword.other.get.cs'); - export const Goto = createToken('goto', 'keyword.control.goto.cs'); - export const If = createToken('if', 'keyword.control.conditional.if.cs'); export const Implicit = createToken('implicit', 'keyword.other.implicit.cs'); - export const In = createToken('in', 'keyword.control.loop.in.cs'); export const Interface = createToken('interface', 'keyword.other.interface.cs'); export const Lock = createToken('lock', 'keyword.other.lock.cs'); export const Namespace = createToken('namespace', 'keyword.other.namespace.cs'); export const New = createToken('new', 'keyword.other.new.cs'); export const Operator = createToken('operator', 'keyword.other.operator.cs'); export const Remove = createToken('remove', 'keyword.other.remove.cs'); - export const Return = createToken('return', 'keyword.control.flow.return.cs'); export const Set = createToken('set', 'keyword.other.set.cs'); export const Static = createToken('static', 'keyword.other.static.cs'); export const Struct = createToken('struct', 'keyword.other.struct.cs'); - export const Switch = createToken('switch', 'keyword.control.switch.cs'); export const This = createToken('this', 'keyword.other.this.cs'); - export const Throw = createToken('throw', 'keyword.control.flow.throw.cs'); - export const Try = 
createToken('try', 'keyword.control.try.cs'); export const TypeOf = createToken('typeof', 'keyword.other.typeof.cs'); export const Unchecked = createToken('unchecked', 'keyword.other.unchecked.cs'); export const Using = createToken('using', 'keyword.other.using.cs'); export const Var = createToken('var', 'keyword.other.var.cs'); - export const When = createToken('when', 'keyword.control.try.when.cs'); export const Where = createToken('where', 'keyword.other.where.cs'); - export const While = createToken('while', 'keyword.control.loop.while.cs'); - export const Yield = createToken('yield', 'keyword.control.flow.yield.cs'); } export namespace Literals { diff --git a/test/syntaxes/yield-statements.test.syntax.ts b/test/syntaxes/yield-statements.test.syntax.ts index 6de730eec9..6ca6509f4a 100644 --- a/test/syntaxes/yield-statements.test.syntax.ts +++ b/test/syntaxes/yield-statements.test.syntax.ts @@ -15,8 +15,8 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Keywords.Yield, - Token.Keywords.Return, + Token.Keywords.Control.Yield, + Token.Keywords.Control.Return, Token.Literals.Numeric.Decimal("42"), Token.Punctuation.Semicolon ]); @@ -27,8 +27,8 @@ describe("Grammar", () => { const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Keywords.Yield, - Token.Keywords.Break, + Token.Keywords.Control.Yield, + Token.Keywords.Control.Break, Token.Punctuation.Semicolon ]); }); From 5861d0d6024d40ea6d061c3238e2b429cde2ffc7 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Mon, 9 Jan 2017 15:17:15 -0800 Subject: [PATCH 117/192] Consolidate statement tests --- .../syntaxes/checked-unchecked.test.syntax.ts | 31 +- .../iteration-statements.test.syntax.ts | 171 --- test/syntaxes/lock-statements.test.syntax.ts | 99 -- .../selection-statements.test.syntax.ts | 351 ------ test/syntaxes/statements.test.syntax.ts | 995 ++++++++++++++++++ test/syntaxes/try-statements.test.syntax.ts | 208 ---- 
test/syntaxes/using-statements.test.syntax.ts | 151 --- test/syntaxes/yield-statements.test.syntax.ts | 36 - 8 files changed, 996 insertions(+), 1046 deletions(-) delete mode 100644 test/syntaxes/iteration-statements.test.syntax.ts delete mode 100644 test/syntaxes/lock-statements.test.syntax.ts delete mode 100644 test/syntaxes/selection-statements.test.syntax.ts create mode 100644 test/syntaxes/statements.test.syntax.ts delete mode 100644 test/syntaxes/try-statements.test.syntax.ts delete mode 100644 test/syntaxes/using-statements.test.syntax.ts delete mode 100644 test/syntaxes/yield-statements.test.syntax.ts diff --git a/test/syntaxes/checked-unchecked.test.syntax.ts b/test/syntaxes/checked-unchecked.test.syntax.ts index ac64cd4dd4..3354dd790b 100644 --- a/test/syntaxes/checked-unchecked.test.syntax.ts +++ b/test/syntaxes/checked-unchecked.test.syntax.ts @@ -10,34 +10,6 @@ describe("Grammar", () => { before(() => should()); describe("Checked/Unchecked", () => { - it("checked statement", () => { - const input = Input.InMethod(` -checked -{ -}`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Checked, - Token.Punctuation.OpenBrace, - Token.Punctuation.CloseBrace - ]); - }); - - it("unchecked statement", () => { - const input = Input.InMethod(` -unchecked -{ -}`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Unchecked, - Token.Punctuation.OpenBrace, - Token.Punctuation.CloseBrace - ]); - }); - it("checked expression", () => { const input = Input.InMethod(`int x = checked(42);`); const tokens = tokenize(input); @@ -70,7 +42,7 @@ unchecked ]); }); - it("", () => { + it("checked expression inside checked statement", () => { const input = ` class C { @@ -116,6 +88,5 @@ class C Token.Punctuation.CloseBrace ]); }); - }); }); \ No newline at end of file diff --git a/test/syntaxes/iteration-statements.test.syntax.ts b/test/syntaxes/iteration-statements.test.syntax.ts deleted file mode 100644 index 
c028c2abbd..0000000000 --- a/test/syntaxes/iteration-statements.test.syntax.ts +++ /dev/null @@ -1,171 +0,0 @@ -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ - -import { should } from 'chai'; -import { tokenize, Input, Token } from './utils/tokenize'; - -describe("Grammar", () => { - before(() => should()); - - describe("Iteration statements (loops)", () => { - it("single-line while loop", () => { - const input = Input.InMethod(`while (true) { }`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Control.While, - Token.Punctuation.OpenParen, - Token.Literals.Boolean.True, - Token.Punctuation.CloseParen, - Token.Punctuation.OpenBrace, - Token.Punctuation.CloseBrace - ]); - }); - - it("single-line do..while loop", () => { - - const input = Input.InMethod(`do { } while (true);`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Control.Do, - Token.Punctuation.OpenBrace, - Token.Punctuation.CloseBrace, - Token.Keywords.Control.While, - Token.Punctuation.OpenParen, - Token.Literals.Boolean.True, - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon - ]); - }); - - it("single-line for loop", () => { - - const input = Input.InMethod(`for (int i = 0; i < 42; i++) { }`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Control.For, - Token.Punctuation.OpenParen, - Token.PrimitiveType.Int, - Token.Variables.Local("i"), - Token.Operators.Assignment, - Token.Literals.Numeric.Decimal("0"), - Token.Punctuation.Semicolon, - Token.Variables.ReadWrite("i"), - Token.Operators.Relational.LessThan, - Token.Literals.Numeric.Decimal("42"), - Token.Punctuation.Semicolon, - 
Token.Variables.ReadWrite("i"), - Token.Operators.Increment, - Token.Punctuation.CloseParen, - Token.Punctuation.OpenBrace, - Token.Punctuation.CloseBrace, - ]); - }); - - it("for loop with break", () => { - - const input = Input.InMethod(` -for (int i = 0; i < 42; i++) -{ - break; -}`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Control.For, - Token.Punctuation.OpenParen, - Token.PrimitiveType.Int, - Token.Variables.Local("i"), - Token.Operators.Assignment, - Token.Literals.Numeric.Decimal("0"), - Token.Punctuation.Semicolon, - Token.Variables.ReadWrite("i"), - Token.Operators.Relational.LessThan, - Token.Literals.Numeric.Decimal("42"), - Token.Punctuation.Semicolon, - Token.Variables.ReadWrite("i"), - Token.Operators.Increment, - Token.Punctuation.CloseParen, - Token.Punctuation.OpenBrace, - Token.Keywords.Control.Break, - Token.Punctuation.Semicolon, - Token.Punctuation.CloseBrace, - ]); - }); - - it("for loop with continue", () => { - - const input = Input.InMethod(` -for (int i = 0; i < 42; i++) -{ - continue; -}`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Control.For, - Token.Punctuation.OpenParen, - Token.PrimitiveType.Int, - Token.Variables.Local("i"), - Token.Operators.Assignment, - Token.Literals.Numeric.Decimal("0"), - Token.Punctuation.Semicolon, - Token.Variables.ReadWrite("i"), - Token.Operators.Relational.LessThan, - Token.Literals.Numeric.Decimal("42"), - Token.Punctuation.Semicolon, - Token.Variables.ReadWrite("i"), - Token.Operators.Increment, - Token.Punctuation.CloseParen, - Token.Punctuation.OpenBrace, - Token.Keywords.Control.Continue, - Token.Punctuation.Semicolon, - Token.Punctuation.CloseBrace, - ]); - }); - - it("single-line foreach loop", () => { - - const input = Input.InMethod(`foreach (int i in numbers) { }`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Control.ForEach, - Token.Punctuation.OpenParen, - 
Token.PrimitiveType.Int, - Token.Variables.Local("i"), - Token.Keywords.Control.In, - Token.Variables.ReadWrite("numbers"), - Token.Punctuation.CloseParen, - Token.Punctuation.OpenBrace, - Token.Punctuation.CloseBrace, - ]); - }); - - it("foreach loop with var (issue #816)", () => { - - const input = Input.InMethod(` -foreach (var s in myList) -{ - -}`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Control.ForEach, - Token.Punctuation.OpenParen, - Token.Keywords.Var, - Token.Variables.Local("s"), - Token.Keywords.Control.In, - Token.Variables.ReadWrite("myList"), - Token.Punctuation.CloseParen, - Token.Punctuation.OpenBrace, - Token.Punctuation.CloseBrace, - ]); - }); - }); -}); \ No newline at end of file diff --git a/test/syntaxes/lock-statements.test.syntax.ts b/test/syntaxes/lock-statements.test.syntax.ts deleted file mode 100644 index 3b9f607ed7..0000000000 --- a/test/syntaxes/lock-statements.test.syntax.ts +++ /dev/null @@ -1,99 +0,0 @@ -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. 
- *--------------------------------------------------------------------------------------------*/ - -import { should } from 'chai'; -import { tokenize, Input, Token } from './utils/tokenize'; - -describe("Grammar", () => { - before(() => should()); - - describe("Lock statements", () => { - it("single-line lock with embedded statement", () => { - const input = Input.InMethod(`lock (new object()) Do();`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Lock, - Token.Punctuation.OpenParen, - Token.Keywords.New, - Token.PrimitiveType.Object, - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - Token.Punctuation.CloseParen, - Token.Identifiers.MethodName("Do"), - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon - ]); - }); - - it("single-line lock with block", () => { - const input = Input.InMethod(`lock (new object()) { Do(); }`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Lock, - Token.Punctuation.OpenParen, - Token.Keywords.New, - Token.PrimitiveType.Object, - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - Token.Punctuation.CloseParen, - Token.Punctuation.OpenBrace, - Token.Identifiers.MethodName("Do"), - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon, - Token.Punctuation.CloseBrace - ]); - }); - - it("lock with embedded statement", () => { - const input = Input.InMethod(` -lock (new object()) - Do();`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Lock, - Token.Punctuation.OpenParen, - Token.Keywords.New, - Token.PrimitiveType.Object, - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - Token.Punctuation.CloseParen, - Token.Identifiers.MethodName("Do"), - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon - ]); - }); - - it("lock with block", () => { - const input = Input.InMethod(` -lock (new 
object()) -{ - Do(); -}`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Lock, - Token.Punctuation.OpenParen, - Token.Keywords.New, - Token.PrimitiveType.Object, - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - Token.Punctuation.CloseParen, - Token.Punctuation.OpenBrace, - Token.Identifiers.MethodName("Do"), - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon, - Token.Punctuation.CloseBrace - ]); - }); - }); -}); \ No newline at end of file diff --git a/test/syntaxes/selection-statements.test.syntax.ts b/test/syntaxes/selection-statements.test.syntax.ts deleted file mode 100644 index f77380f7db..0000000000 --- a/test/syntaxes/selection-statements.test.syntax.ts +++ /dev/null @@ -1,351 +0,0 @@ -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. 
- *--------------------------------------------------------------------------------------------*/ - -import { should } from 'chai'; -import { tokenize, Input, Token } from './utils/tokenize'; - -describe("Grammar", () => { - before(() => should()); - - describe("Selection statements", () => { - it("single-line if with embedded statement", () => { - const input = Input.InMethod(`if (true) return;`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Control.If, - Token.Punctuation.OpenParen, - Token.Literals.Boolean.True, - Token.Punctuation.CloseParen, - Token.Keywords.Control.Return, - Token.Punctuation.Semicolon - ]); - }); - - it("single-line if with embedded method call", () => { - const input = Input.InMethod(`if (true) Do();`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Control.If, - Token.Punctuation.OpenParen, - Token.Literals.Boolean.True, - Token.Punctuation.CloseParen, - Token.Identifiers.MethodName("Do"), - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon - ]); - }); - - it("single-line if with block", () => { - const input = Input.InMethod(`if (true) { Do(); }`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Control.If, - Token.Punctuation.OpenParen, - Token.Literals.Boolean.True, - Token.Punctuation.CloseParen, - Token.Punctuation.OpenBrace, - Token.Identifiers.MethodName("Do"), - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon, - Token.Punctuation.CloseBrace - ]); - }); - - it("if with embedded statement", () => { - const input = Input.InMethod(` -if (true) - Do(); -`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Control.If, - Token.Punctuation.OpenParen, - Token.Literals.Boolean.True, - Token.Punctuation.CloseParen, - Token.Identifiers.MethodName("Do"), - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - 
Token.Punctuation.Semicolon - ]); - }); - - it("if with block", () => { - const input = Input.InMethod(` -if (true) -{ - Do(); -}`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Control.If, - Token.Punctuation.OpenParen, - Token.Literals.Boolean.True, - Token.Punctuation.CloseParen, - Token.Punctuation.OpenBrace, - Token.Identifiers.MethodName("Do"), - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon, - Token.Punctuation.CloseBrace - ]); - }); - - it("if-else with embedded statements", () => { - const input = Input.InMethod(` -if (true) - Do(); -else - Dont(); -`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Control.If, - Token.Punctuation.OpenParen, - Token.Literals.Boolean.True, - Token.Punctuation.CloseParen, - Token.Identifiers.MethodName("Do"), - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon, - Token.Keywords.Control.Else, - Token.Identifiers.MethodName("Dont"), - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon - ]); - }); - - it("if-else with blocks", () => { - const input = Input.InMethod(` -if (true) -{ - Do(); -} -else -{ - Dont(); -}`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Control.If, - Token.Punctuation.OpenParen, - Token.Literals.Boolean.True, - Token.Punctuation.CloseParen, - Token.Punctuation.OpenBrace, - Token.Identifiers.MethodName("Do"), - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon, - Token.Punctuation.CloseBrace, - Token.Keywords.Control.Else, - Token.Punctuation.OpenBrace, - Token.Identifiers.MethodName("Dont"), - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon, - Token.Punctuation.CloseBrace - ]); - }); - - it("if-elseif with embedded statements", () => { - const input = Input.InMethod(` -if (true) - Do(); 
-else if (false) - Dont(); -`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Control.If, - Token.Punctuation.OpenParen, - Token.Literals.Boolean.True, - Token.Punctuation.CloseParen, - Token.Identifiers.MethodName("Do"), - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon, - Token.Keywords.Control.Else, - Token.Keywords.Control.If, - Token.Punctuation.OpenParen, - Token.Literals.Boolean.False, - Token.Punctuation.CloseParen, - Token.Identifiers.MethodName("Dont"), - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon - ]); - }); - - it("if-elseif with blocks", () => { - const input = Input.InMethod(` -if (true) -{ - Do(); -} -else if (false) -{ - Dont(); -}`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Control.If, - Token.Punctuation.OpenParen, - Token.Literals.Boolean.True, - Token.Punctuation.CloseParen, - Token.Punctuation.OpenBrace, - Token.Identifiers.MethodName("Do"), - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon, - Token.Punctuation.CloseBrace, - Token.Keywords.Control.Else, - Token.Keywords.Control.If, - Token.Punctuation.OpenParen, - Token.Literals.Boolean.False, - Token.Punctuation.CloseParen, - Token.Punctuation.OpenBrace, - Token.Identifiers.MethodName("Dont"), - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon, - Token.Punctuation.CloseBrace - ]); - }); - - it("switch statement", () => { - const input = Input.InMethod(` -switch (i) { -case 0: - goto case 1; -case 1: - goto default; -default: - break; -}`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Control.Switch, - Token.Punctuation.OpenParen, - Token.Variables.ReadWrite("i"), - Token.Punctuation.CloseParen, - Token.Punctuation.OpenBrace, - Token.Keywords.Control.Case, - Token.Literals.Numeric.Decimal("0"), - 
Token.Punctuation.Colon, - Token.Keywords.Control.Goto, - Token.Keywords.Control.Case, - Token.Literals.Numeric.Decimal("1"), - Token.Punctuation.Semicolon, - Token.Keywords.Control.Case, - Token.Literals.Numeric.Decimal("1"), - Token.Punctuation.Colon, - Token.Keywords.Control.Goto, - Token.Keywords.Control.Default, - Token.Punctuation.Semicolon, - Token.Keywords.Control.Default, - Token.Punctuation.Colon, - Token.Keywords.Control.Break, - Token.Punctuation.Semicolon, - Token.Punctuation.CloseBrace - ]); - }); - - it("switch statement with blocks", () => { - const input = Input.InMethod(` -switch (i) { - case 0: - { - goto case 1; - } - case 1: - { - goto default; - } - default: - { - break; - } -}`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Control.Switch, - Token.Punctuation.OpenParen, - Token.Variables.ReadWrite("i"), - Token.Punctuation.CloseParen, - Token.Punctuation.OpenBrace, - Token.Keywords.Control.Case, - Token.Literals.Numeric.Decimal("0"), - Token.Punctuation.Colon, - Token.Punctuation.OpenBrace, - Token.Keywords.Control.Goto, - Token.Keywords.Control.Case, - Token.Literals.Numeric.Decimal("1"), - Token.Punctuation.Semicolon, - Token.Punctuation.CloseBrace, - Token.Keywords.Control.Case, - Token.Literals.Numeric.Decimal("1"), - Token.Punctuation.Colon, - Token.Punctuation.OpenBrace, - Token.Keywords.Control.Goto, - Token.Keywords.Control.Default, - Token.Punctuation.Semicolon, - Token.Punctuation.CloseBrace, - Token.Keywords.Control.Default, - Token.Punctuation.Colon, - Token.Punctuation.OpenBrace, - Token.Keywords.Control.Break, - Token.Punctuation.Semicolon, - Token.Punctuation.CloseBrace, - Token.Punctuation.CloseBrace - ]); - }); - - it("if statement inside while statment with continue and break", () => { - const input = Input.InMethod(` -while (i < 10) -{ - ++i; - if (true) continue; - break; -}`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Control.While, - 
Token.Punctuation.OpenParen, - Token.Variables.ReadWrite("i"), - Token.Operators.Relational.LessThan, - Token.Literals.Numeric.Decimal("10"), - Token.Punctuation.CloseParen, - Token.Punctuation.OpenBrace, - Token.Operators.Increment, - Token.Variables.ReadWrite("i"), - Token.Punctuation.Semicolon, - Token.Keywords.Control.If, - Token.Punctuation.OpenParen, - Token.Literals.Boolean.True, - Token.Punctuation.CloseParen, - Token.Keywords.Control.Continue, - Token.Punctuation.Semicolon, - Token.Keywords.Control.Break, - Token.Punctuation.Semicolon, - Token.Punctuation.CloseBrace - ]); - }); - }); -}); \ No newline at end of file diff --git a/test/syntaxes/statements.test.syntax.ts b/test/syntaxes/statements.test.syntax.ts new file mode 100644 index 0000000000..7466d8b794 --- /dev/null +++ b/test/syntaxes/statements.test.syntax.ts @@ -0,0 +1,995 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. 
+ *--------------------------------------------------------------------------------------------*/ + +import { should } from 'chai'; +import { tokenize, Input, Token } from './utils/tokenize'; + +describe("Grammar", () => { + before(() => should()); + + describe("Statements", () => { + describe("Checked/Unchecked", () => { + it("checked statement", () => { + const input = Input.InMethod(` +checked +{ +}`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Checked, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace + ]); + }); + + it("unchecked statement", () => { + const input = Input.InMethod(` +unchecked +{ +}`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Unchecked, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace + ]); + }); + }); + + describe("Do", () => { + it("single-line do..while loop", () => { + const input = Input.InMethod(`do { } while (true);`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Control.Do, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Keywords.Control.While, + Token.Punctuation.OpenParen, + Token.Literals.Boolean.True, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + }); + + describe("For", () => { + it("single-line for loop", () => { + const input = Input.InMethod(`for (int i = 0; i < 42; i++) { }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Control.For, + Token.Punctuation.OpenParen, + Token.PrimitiveType.Int, + Token.Variables.Local("i"), + Token.Operators.Assignment, + Token.Literals.Numeric.Decimal("0"), + Token.Punctuation.Semicolon, + Token.Variables.ReadWrite("i"), + Token.Operators.Relational.LessThan, + Token.Literals.Numeric.Decimal("42"), + Token.Punctuation.Semicolon, + Token.Variables.ReadWrite("i"), + Token.Operators.Increment, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + 
Token.Punctuation.CloseBrace, + ]); + }); + + it("for loop with break", () => { + const input = Input.InMethod(` +for (int i = 0; i < 42; i++) +{ + break; +}`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Control.For, + Token.Punctuation.OpenParen, + Token.PrimitiveType.Int, + Token.Variables.Local("i"), + Token.Operators.Assignment, + Token.Literals.Numeric.Decimal("0"), + Token.Punctuation.Semicolon, + Token.Variables.ReadWrite("i"), + Token.Operators.Relational.LessThan, + Token.Literals.Numeric.Decimal("42"), + Token.Punctuation.Semicolon, + Token.Variables.ReadWrite("i"), + Token.Operators.Increment, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Keywords.Control.Break, + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace, + ]); + }); + + it("for loop with continue", () => { + const input = Input.InMethod(` +for (int i = 0; i < 42; i++) +{ + continue; +}`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Control.For, + Token.Punctuation.OpenParen, + Token.PrimitiveType.Int, + Token.Variables.Local("i"), + Token.Operators.Assignment, + Token.Literals.Numeric.Decimal("0"), + Token.Punctuation.Semicolon, + Token.Variables.ReadWrite("i"), + Token.Operators.Relational.LessThan, + Token.Literals.Numeric.Decimal("42"), + Token.Punctuation.Semicolon, + Token.Variables.ReadWrite("i"), + Token.Operators.Increment, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Keywords.Control.Continue, + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace, + ]); + }); + }); + + describe("ForEach", () => { + it("single-line foreach loop", () => { + const input = Input.InMethod(`foreach (int i in numbers) { }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Control.ForEach, + Token.Punctuation.OpenParen, + Token.PrimitiveType.Int, + Token.Variables.Local("i"), + Token.Keywords.Control.In, + 
Token.Variables.ReadWrite("numbers"), + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + ]); + }); + + it("foreach loop with var (issue #816)", () => { + const input = Input.InMethod(` +foreach (var s in myList) +{ + +}`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Control.ForEach, + Token.Punctuation.OpenParen, + Token.Keywords.Var, + Token.Variables.Local("s"), + Token.Keywords.Control.In, + Token.Variables.ReadWrite("myList"), + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + ]); + }); + }); + + describe("While", () => { + it("single-line while loop", () => { + const input = Input.InMethod(`while (true) { }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Control.While, + Token.Punctuation.OpenParen, + Token.Literals.Boolean.True, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace + ]); + }); + }); + + describe("If", () => { + it("single-line if with embedded statement", () => { + const input = Input.InMethod(`if (true) return;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Control.If, + Token.Punctuation.OpenParen, + Token.Literals.Boolean.True, + Token.Punctuation.CloseParen, + Token.Keywords.Control.Return, + Token.Punctuation.Semicolon + ]); + }); + + it("single-line if with embedded method call", () => { + const input = Input.InMethod(`if (true) Do();`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Control.If, + Token.Punctuation.OpenParen, + Token.Literals.Boolean.True, + Token.Punctuation.CloseParen, + Token.Identifiers.MethodName("Do"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("single-line if with block", () => { + const input = Input.InMethod(`if (true) { Do(); }`); + const tokens = tokenize(input); + + 
tokens.should.deep.equal([ + Token.Keywords.Control.If, + Token.Punctuation.OpenParen, + Token.Literals.Boolean.True, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Identifiers.MethodName("Do"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace + ]); + }); + + it("if with embedded statement", () => { + const input = Input.InMethod(` +if (true) + Do(); +`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Control.If, + Token.Punctuation.OpenParen, + Token.Literals.Boolean.True, + Token.Punctuation.CloseParen, + Token.Identifiers.MethodName("Do"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("if with block", () => { + const input = Input.InMethod(` +if (true) +{ + Do(); +}`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Control.If, + Token.Punctuation.OpenParen, + Token.Literals.Boolean.True, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Identifiers.MethodName("Do"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace + ]); + }); + + it("if-else with embedded statements", () => { + const input = Input.InMethod(` +if (true) + Do(); +else + Dont(); +`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Control.If, + Token.Punctuation.OpenParen, + Token.Literals.Boolean.True, + Token.Punctuation.CloseParen, + Token.Identifiers.MethodName("Do"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + Token.Keywords.Control.Else, + Token.Identifiers.MethodName("Dont"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("if-else with blocks", () => { + const input = Input.InMethod(` +if (true) +{ + Do(); +} +else 
+{ + Dont(); +}`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Control.If, + Token.Punctuation.OpenParen, + Token.Literals.Boolean.True, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Identifiers.MethodName("Do"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace, + Token.Keywords.Control.Else, + Token.Punctuation.OpenBrace, + Token.Identifiers.MethodName("Dont"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace + ]); + }); + + it("if-elseif with embedded statements", () => { + const input = Input.InMethod(` +if (true) + Do(); +else if (false) + Dont(); +`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Control.If, + Token.Punctuation.OpenParen, + Token.Literals.Boolean.True, + Token.Punctuation.CloseParen, + Token.Identifiers.MethodName("Do"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + Token.Keywords.Control.Else, + Token.Keywords.Control.If, + Token.Punctuation.OpenParen, + Token.Literals.Boolean.False, + Token.Punctuation.CloseParen, + Token.Identifiers.MethodName("Dont"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("if-elseif with blocks", () => { + const input = Input.InMethod(` +if (true) +{ + Do(); +} +else if (false) +{ + Dont(); +}`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Control.If, + Token.Punctuation.OpenParen, + Token.Literals.Boolean.True, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Identifiers.MethodName("Do"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace, + Token.Keywords.Control.Else, + Token.Keywords.Control.If, + 
Token.Punctuation.OpenParen, + Token.Literals.Boolean.False, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Identifiers.MethodName("Dont"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace + ]); + }); + + it("if statement inside while statment with continue and break", () => { + const input = Input.InMethod(` +while (i < 10) +{ + ++i; + if (true) continue; + break; +}`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Control.While, + Token.Punctuation.OpenParen, + Token.Variables.ReadWrite("i"), + Token.Operators.Relational.LessThan, + Token.Literals.Numeric.Decimal("10"), + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Operators.Increment, + Token.Variables.ReadWrite("i"), + Token.Punctuation.Semicolon, + Token.Keywords.Control.If, + Token.Punctuation.OpenParen, + Token.Literals.Boolean.True, + Token.Punctuation.CloseParen, + Token.Keywords.Control.Continue, + Token.Punctuation.Semicolon, + Token.Keywords.Control.Break, + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace + ]); + }); + }); + + describe("Lock", () => { + it("single-line lock with embedded statement", () => { + const input = Input.InMethod(`lock (new object()) Do();`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Lock, + Token.Punctuation.OpenParen, + Token.Keywords.New, + Token.PrimitiveType.Object, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.CloseParen, + Token.Identifiers.MethodName("Do"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("single-line lock with block", () => { + const input = Input.InMethod(`lock (new object()) { Do(); }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Lock, + Token.Punctuation.OpenParen, + Token.Keywords.New, + 
Token.PrimitiveType.Object, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Identifiers.MethodName("Do"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace + ]); + }); + + it("lock with embedded statement", () => { + const input = Input.InMethod(` +lock (new object()) + Do();`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Lock, + Token.Punctuation.OpenParen, + Token.Keywords.New, + Token.PrimitiveType.Object, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.CloseParen, + Token.Identifiers.MethodName("Do"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("lock with block", () => { + const input = Input.InMethod(` +lock (new object()) +{ + Do(); +}`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Lock, + Token.Punctuation.OpenParen, + Token.Keywords.New, + Token.PrimitiveType.Object, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Identifiers.MethodName("Do"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace + ]); + }); + }); + + describe("Switch", () => { + it("switch statement", () => { + const input = Input.InMethod(` +switch (i) { +case 0: + goto case 1; +case 1: + goto default; +default: + break; +}`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Control.Switch, + Token.Punctuation.OpenParen, + Token.Variables.ReadWrite("i"), + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Keywords.Control.Case, + Token.Literals.Numeric.Decimal("0"), + Token.Punctuation.Colon, + Token.Keywords.Control.Goto, + 
Token.Keywords.Control.Case, + Token.Literals.Numeric.Decimal("1"), + Token.Punctuation.Semicolon, + Token.Keywords.Control.Case, + Token.Literals.Numeric.Decimal("1"), + Token.Punctuation.Colon, + Token.Keywords.Control.Goto, + Token.Keywords.Control.Default, + Token.Punctuation.Semicolon, + Token.Keywords.Control.Default, + Token.Punctuation.Colon, + Token.Keywords.Control.Break, + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace + ]); + }); + + it("switch statement with blocks", () => { + const input = Input.InMethod(` +switch (i) { + case 0: + { + goto case 1; + } + case 1: + { + goto default; + } + default: + { + break; + } +}`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Control.Switch, + Token.Punctuation.OpenParen, + Token.Variables.ReadWrite("i"), + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Keywords.Control.Case, + Token.Literals.Numeric.Decimal("0"), + Token.Punctuation.Colon, + Token.Punctuation.OpenBrace, + Token.Keywords.Control.Goto, + Token.Keywords.Control.Case, + Token.Literals.Numeric.Decimal("1"), + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace, + Token.Keywords.Control.Case, + Token.Literals.Numeric.Decimal("1"), + Token.Punctuation.Colon, + Token.Punctuation.OpenBrace, + Token.Keywords.Control.Goto, + Token.Keywords.Control.Default, + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace, + Token.Keywords.Control.Default, + Token.Punctuation.Colon, + Token.Punctuation.OpenBrace, + Token.Keywords.Control.Break, + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseBrace + ]); + }); + }); + + describe("Try", () => { + it("try-finally", () => { + const input = Input.InMethod(` +try +{ +} +finally +{ +}`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Control.Try, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Keywords.Finally, + Token.Punctuation.OpenBrace, + 
Token.Punctuation.CloseBrace + ]); + }); + + it("try-catch", () => { + const input = Input.InMethod(` +try +{ +} +catch +{ +}`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Control.Try, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Keywords.Control.Catch, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace + ]); + }); + + it("try-catch-finally", () => { + const input = Input.InMethod(` +try +{ +} +catch +{ +} +finally +{ +}`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Control.Try, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Keywords.Control.Catch, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Keywords.Finally, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace + ]); + }); + + it("try-catch with exception type", () => { + const input = Input.InMethod(` +try +{ +} +catch (Exception) +{ +}`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Control.Try, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Keywords.Control.Catch, + Token.Punctuation.OpenParen, + Token.Type("Exception"), + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace + ]); + }); + + it("try-catch with exception type and identifier", () => { + const input = Input.InMethod(` +try +{ +} +catch (Exception ex) +{ +}`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Control.Try, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Keywords.Control.Catch, + Token.Punctuation.OpenParen, + Token.Type("Exception"), + Token.Variables.Local("ex"), + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace + ]); + }); + + it("try-catch with exception filter", () => { + const input = Input.InMethod(` +try +{ + throw new Exception(); +} +catch when (true) +{ 
+}`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Control.Try, + Token.Punctuation.OpenBrace, + Token.Keywords.Control.Throw, + Token.Keywords.New, + Token.Type("Exception"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace, + Token.Keywords.Control.Catch, + Token.Keywords.Control.When, + Token.Punctuation.OpenParen, + Token.Literals.Boolean.True, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace + ]); + }); + + it("try-catch with exception type and filter", () => { + const input = Input.InMethod(` +try +{ +} +catch (Exception) when (true) +{ +}`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Control.Try, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Keywords.Control.Catch, + Token.Punctuation.OpenParen, + Token.Type("Exception"), + Token.Punctuation.CloseParen, + Token.Keywords.Control.When, + Token.Punctuation.OpenParen, + Token.Literals.Boolean.True, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace + ]); + }); + + it("try-finally followed by statement", () => { + const input = Input.InMethod(` +try +{ +} +finally +{ +} +int x;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Control.Try, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Keywords.Finally, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.PrimitiveType.Int, + Token.Variables.Local("x"), + Token.Punctuation.Semicolon + ]); + }); + }); + + describe("Using", () => { + it("single-line using with expression and embedded statement", () => { + const input = Input.InMethod(`using (new object()) Do();`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Using, + Token.Punctuation.OpenParen, + Token.Keywords.New, + 
Token.PrimitiveType.Object, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.CloseParen, + Token.Identifiers.MethodName("Do"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("single-line using with expression and block", () => { + const input = Input.InMethod(`using (new object()) { Do(); }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Using, + Token.Punctuation.OpenParen, + Token.Keywords.New, + Token.PrimitiveType.Object, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Identifiers.MethodName("Do"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace + ]); + }); + + it("using with expression and embedded statement", () => { + const input = Input.InMethod(` +using (new object()) + Do();`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Using, + Token.Punctuation.OpenParen, + Token.Keywords.New, + Token.PrimitiveType.Object, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.CloseParen, + Token.Identifiers.MethodName("Do"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("using with expression and block", () => { + const input = Input.InMethod(` +using (new object()) +{ + Do(); +}`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Using, + Token.Punctuation.OpenParen, + Token.Keywords.New, + Token.PrimitiveType.Object, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Identifiers.MethodName("Do"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace + ]); + 
}); + + it("using with local variable and embedded statement", () => { + const input = Input.InMethod(` +using (var o = new object()) + Do();`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Using, + Token.Punctuation.OpenParen, + Token.Keywords.Var, + Token.Variables.Local("o"), + Token.Operators.Assignment, + Token.Keywords.New, + Token.PrimitiveType.Object, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.CloseParen, + Token.Identifiers.MethodName("Do"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("using with local variable and block", () => { + const input = Input.InMethod(` +using (var o = new object()) +{ + Do(); +}`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Using, + Token.Punctuation.OpenParen, + Token.Keywords.Var, + Token.Variables.Local("o"), + Token.Operators.Assignment, + Token.Keywords.New, + Token.PrimitiveType.Object, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Identifiers.MethodName("Do"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace + ]); + }); + }); + + describe("Yield", () => { + it("yield return", () => { + const input = Input.InMethod(`yield return 42;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Control.Yield, + Token.Keywords.Control.Return, + Token.Literals.Numeric.Decimal("42"), + Token.Punctuation.Semicolon + ]); + }); + + it("yield break", () => { + const input = Input.InMethod(`yield break;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Control.Yield, + Token.Keywords.Control.Break, + Token.Punctuation.Semicolon + ]); + }); + }); + }); +}); \ No newline at end of file diff --git 
a/test/syntaxes/try-statements.test.syntax.ts b/test/syntaxes/try-statements.test.syntax.ts deleted file mode 100644 index e14c739650..0000000000 --- a/test/syntaxes/try-statements.test.syntax.ts +++ /dev/null @@ -1,208 +0,0 @@ -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ - -import { should } from 'chai'; -import { tokenize, Input, Token } from './utils/tokenize'; - -describe("Grammar", () => { - before(() => should()); - - describe("Try statements", () => { - it("try-finally", () => { - const input = Input.InMethod(` -try -{ -} -finally -{ -}`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Control.Try, - Token.Punctuation.OpenBrace, - Token.Punctuation.CloseBrace, - Token.Keywords.Finally, - Token.Punctuation.OpenBrace, - Token.Punctuation.CloseBrace - ]); - }); - - it("try-catch", () => { - const input = Input.InMethod(` -try -{ -} -catch -{ -}`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Control.Try, - Token.Punctuation.OpenBrace, - Token.Punctuation.CloseBrace, - Token.Keywords.Control.Catch, - Token.Punctuation.OpenBrace, - Token.Punctuation.CloseBrace - ]); - }); - - it("try-catch-finally", () => { - const input = Input.InMethod(` -try -{ -} -catch -{ -} -finally -{ -}`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Control.Try, - Token.Punctuation.OpenBrace, - Token.Punctuation.CloseBrace, - Token.Keywords.Control.Catch, - Token.Punctuation.OpenBrace, - Token.Punctuation.CloseBrace, - Token.Keywords.Finally, - Token.Punctuation.OpenBrace, - Token.Punctuation.CloseBrace - ]); - }); - - it("try-catch with exception type", () => { - const input = 
Input.InMethod(` -try -{ -} -catch (Exception) -{ -}`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Control.Try, - Token.Punctuation.OpenBrace, - Token.Punctuation.CloseBrace, - Token.Keywords.Control.Catch, - Token.Punctuation.OpenParen, - Token.Type("Exception"), - Token.Punctuation.CloseParen, - Token.Punctuation.OpenBrace, - Token.Punctuation.CloseBrace - ]); - }); - - it("try-catch with exception type and identifier", () => { - const input = Input.InMethod(` -try -{ -} -catch (Exception ex) -{ -}`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Control.Try, - Token.Punctuation.OpenBrace, - Token.Punctuation.CloseBrace, - Token.Keywords.Control.Catch, - Token.Punctuation.OpenParen, - Token.Type("Exception"), - Token.Variables.Local("ex"), - Token.Punctuation.CloseParen, - Token.Punctuation.OpenBrace, - Token.Punctuation.CloseBrace - ]); - }); - - it("try-catch with exception filter", () => { - const input = Input.InMethod(` -try -{ - throw new Exception(); -} -catch when (true) -{ -}`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Control.Try, - Token.Punctuation.OpenBrace, - Token.Keywords.Control.Throw, - Token.Keywords.New, - Token.Type("Exception"), - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon, - Token.Punctuation.CloseBrace, - Token.Keywords.Control.Catch, - Token.Keywords.Control.When, - Token.Punctuation.OpenParen, - Token.Literals.Boolean.True, - Token.Punctuation.CloseParen, - Token.Punctuation.OpenBrace, - Token.Punctuation.CloseBrace - ]); - }); - - it("try-catch with exception type and filter", () => { - const input = Input.InMethod(` -try -{ -} -catch (Exception) when (true) -{ -}`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Control.Try, - Token.Punctuation.OpenBrace, - Token.Punctuation.CloseBrace, - Token.Keywords.Control.Catch, - 
Token.Punctuation.OpenParen, - Token.Type("Exception"), - Token.Punctuation.CloseParen, - Token.Keywords.Control.When, - Token.Punctuation.OpenParen, - Token.Literals.Boolean.True, - Token.Punctuation.CloseParen, - Token.Punctuation.OpenBrace, - Token.Punctuation.CloseBrace - ]); - }); - - it("try-finally followed by statement", () => { - const input = Input.InMethod(` -try -{ -} -finally -{ -} -int x;`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Control.Try, - Token.Punctuation.OpenBrace, - Token.Punctuation.CloseBrace, - Token.Keywords.Finally, - Token.Punctuation.OpenBrace, - Token.Punctuation.CloseBrace, - Token.PrimitiveType.Int, - Token.Variables.Local("x"), - Token.Punctuation.Semicolon - ]); - }); - }); -}); \ No newline at end of file diff --git a/test/syntaxes/using-statements.test.syntax.ts b/test/syntaxes/using-statements.test.syntax.ts deleted file mode 100644 index 6281826923..0000000000 --- a/test/syntaxes/using-statements.test.syntax.ts +++ /dev/null @@ -1,151 +0,0 @@ -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. 
- *--------------------------------------------------------------------------------------------*/ - -import { should } from 'chai'; -import { tokenize, Input, Token } from './utils/tokenize'; - -describe("Grammar", () => { - before(() => should()); - - describe("Using statements", () => { - it("single-line using with expression and embedded statement", () => { - const input = Input.InMethod(`using (new object()) Do();`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Using, - Token.Punctuation.OpenParen, - Token.Keywords.New, - Token.PrimitiveType.Object, - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - Token.Punctuation.CloseParen, - Token.Identifiers.MethodName("Do"), - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon - ]); - }); - - it("single-line using with expression and block", () => { - const input = Input.InMethod(`using (new object()) { Do(); }`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Using, - Token.Punctuation.OpenParen, - Token.Keywords.New, - Token.PrimitiveType.Object, - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - Token.Punctuation.CloseParen, - Token.Punctuation.OpenBrace, - Token.Identifiers.MethodName("Do"), - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon, - Token.Punctuation.CloseBrace - ]); - }); - - it("using with expression and embedded statement", () => { - const input = Input.InMethod(` -using (new object()) - Do();`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Using, - Token.Punctuation.OpenParen, - Token.Keywords.New, - Token.PrimitiveType.Object, - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - Token.Punctuation.CloseParen, - Token.Identifiers.MethodName("Do"), - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon - ]); - }); - - it("using with 
expression and block", () => { - const input = Input.InMethod(` -using (new object()) -{ - Do(); -}`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Using, - Token.Punctuation.OpenParen, - Token.Keywords.New, - Token.PrimitiveType.Object, - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - Token.Punctuation.CloseParen, - Token.Punctuation.OpenBrace, - Token.Identifiers.MethodName("Do"), - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon, - Token.Punctuation.CloseBrace - ]); - }); - - it("using with local variable and embedded statement", () => { - const input = Input.InMethod(` -using (var o = new object()) - Do();`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Using, - Token.Punctuation.OpenParen, - Token.Keywords.Var, - Token.Variables.Local("o"), - Token.Operators.Assignment, - Token.Keywords.New, - Token.PrimitiveType.Object, - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - Token.Punctuation.CloseParen, - Token.Identifiers.MethodName("Do"), - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon - ]); - }); - - it("using with local variable and block", () => { - const input = Input.InMethod(` -using (var o = new object()) -{ - Do(); -}`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Using, - Token.Punctuation.OpenParen, - Token.Keywords.Var, - Token.Variables.Local("o"), - Token.Operators.Assignment, - Token.Keywords.New, - Token.PrimitiveType.Object, - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - Token.Punctuation.CloseParen, - Token.Punctuation.OpenBrace, - Token.Identifiers.MethodName("Do"), - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon, - Token.Punctuation.CloseBrace - ]); - }); - }); -}); \ No newline at end of file diff --git a/test/syntaxes/yield-statements.test.syntax.ts 
b/test/syntaxes/yield-statements.test.syntax.ts deleted file mode 100644 index 6ca6509f4a..0000000000 --- a/test/syntaxes/yield-statements.test.syntax.ts +++ /dev/null @@ -1,36 +0,0 @@ -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ - -import { should } from 'chai'; -import { tokenize, Input, Token } from './utils/tokenize'; - -describe("Grammar", () => { - before(() => should()); - - describe("Yield statements", () => { - it("yield return", () => { - const input = Input.InMethod(`yield return 42;`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Control.Yield, - Token.Keywords.Control.Return, - Token.Literals.Numeric.Decimal("42"), - Token.Punctuation.Semicolon - ]); - }); - - it("yield break", () => { - const input = Input.InMethod(`yield break;`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Control.Yield, - Token.Keywords.Control.Break, - Token.Punctuation.Semicolon - ]); - }); - }); -}); \ No newline at end of file From 616d6c7ea61d3db2a698a7da05e12363d7d010d4 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Mon, 9 Jan 2017 15:35:03 -0800 Subject: [PATCH 118/192] Consolidate expression and literal tests --- ...nonymous-method-expressions.test.syntax.ts | 348 ---- test/syntaxes/boolean-literals.test.syntax.ts | 39 - test/syntaxes/cast-expressions.test.syntax.ts | 83 - test/syntaxes/char-literals.test.syntax.ts | 54 - .../syntaxes/checked-unchecked.test.syntax.ts | 92 - .../element-access-expressions.test.syntax.ts | 222 --- test/syntaxes/expressions.test.syntax.ts | 1561 ++++++++++++++++- .../invocation-expressions.test.syntax.ts | 272 --- test/syntaxes/literals.test.syntax.ts | 363 
++++ test/syntaxes/numeric-literals.test.syntax.ts | 65 - test/syntaxes/queries.test.syntax.ts | 449 ----- test/syntaxes/string-literals.test.syntax.ts | 251 --- 12 files changed, 1846 insertions(+), 1953 deletions(-) delete mode 100644 test/syntaxes/anonymous-method-expressions.test.syntax.ts delete mode 100644 test/syntaxes/boolean-literals.test.syntax.ts delete mode 100644 test/syntaxes/cast-expressions.test.syntax.ts delete mode 100644 test/syntaxes/char-literals.test.syntax.ts delete mode 100644 test/syntaxes/checked-unchecked.test.syntax.ts delete mode 100644 test/syntaxes/element-access-expressions.test.syntax.ts delete mode 100644 test/syntaxes/invocation-expressions.test.syntax.ts create mode 100644 test/syntaxes/literals.test.syntax.ts delete mode 100644 test/syntaxes/numeric-literals.test.syntax.ts delete mode 100644 test/syntaxes/queries.test.syntax.ts delete mode 100644 test/syntaxes/string-literals.test.syntax.ts diff --git a/test/syntaxes/anonymous-method-expressions.test.syntax.ts b/test/syntaxes/anonymous-method-expressions.test.syntax.ts deleted file mode 100644 index b91acbb594..0000000000 --- a/test/syntaxes/anonymous-method-expressions.test.syntax.ts +++ /dev/null @@ -1,348 +0,0 @@ -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. 
- *--------------------------------------------------------------------------------------------*/ - -import { should } from 'chai'; -import { tokenize, Input, Token } from './utils/tokenize'; - -describe("Grammar", () => { - before(() => should()); - - describe("Anonymous method expressions", () => { - it("lambda expression with no parameters (assignment)", () => { - const input = Input.InMethod(`Action a = () => { };`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Type("Action"), - Token.Variables.Local("a"), - Token.Operators.Assignment, - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - Token.Operators.Arrow, - Token.Punctuation.OpenBrace, - Token.Punctuation.CloseBrace, - Token.Punctuation.Semicolon - ]); - }); - - it("async lambda expression with no parameters (assignment)", () => { - const input = Input.InMethod(`Func a = async () => { };`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Type("Func"), - Token.Punctuation.TypeParameters.Begin, - Token.Type("Task"), - Token.Punctuation.TypeParameters.End, - Token.Variables.Local("a"), - Token.Operators.Assignment, - Token.Keywords.Modifiers.Async, - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - Token.Operators.Arrow, - Token.Punctuation.OpenBrace, - Token.Punctuation.CloseBrace, - Token.Punctuation.Semicolon - ]); - }); - - it("lambda expression with single parameter (assignment)", () => { - const input = Input.InMethod(`Action a = x => { };`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Type("Action"), - Token.Punctuation.TypeParameters.Begin, - Token.PrimitiveType.Int, - Token.Punctuation.TypeParameters.End, - Token.Variables.Local("a"), - Token.Operators.Assignment, - Token.Variables.Parameter("x"), - Token.Operators.Arrow, - Token.Punctuation.OpenBrace, - Token.Punctuation.CloseBrace, - Token.Punctuation.Semicolon - ]); - }); - - it("async lambda expression with single parameter (assignment)", 
() => { - const input = Input.InMethod(`Func a = async x => { };`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Type("Func"), - Token.Punctuation.TypeParameters.Begin, - Token.PrimitiveType.Int, - Token.Punctuation.Comma, - Token.Type("Task"), - Token.Punctuation.TypeParameters.End, - Token.Variables.Local("a"), - Token.Operators.Assignment, - Token.Keywords.Modifiers.Async, - Token.Variables.Parameter("x"), - Token.Operators.Arrow, - Token.Punctuation.OpenBrace, - Token.Punctuation.CloseBrace, - Token.Punctuation.Semicolon - ]); - }); - - it("lambda expression with single typed parameter (assignment)", () => { - const input = Input.InMethod(`Action a = (int x) => { };`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Type("Action"), - Token.Punctuation.TypeParameters.Begin, - Token.PrimitiveType.Int, - Token.Punctuation.TypeParameters.End, - Token.Variables.Local("a"), - Token.Operators.Assignment, - Token.Punctuation.OpenParen, - Token.PrimitiveType.Int, - Token.Variables.Parameter("x"), - Token.Punctuation.CloseParen, - Token.Operators.Arrow, - Token.Punctuation.OpenBrace, - Token.Punctuation.CloseBrace, - Token.Punctuation.Semicolon - ]); - }); - - it("async lambda expression with single typed parameter (assignment)", () => { - const input = Input.InMethod(`Func a = async (int x) => { };`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Type("Func"), - Token.Punctuation.TypeParameters.Begin, - Token.PrimitiveType.Int, - Token.Punctuation.Comma, - Token.Type("Task"), - Token.Punctuation.TypeParameters.End, - Token.Variables.Local("a"), - Token.Operators.Assignment, - Token.Keywords.Modifiers.Async, - Token.Punctuation.OpenParen, - Token.PrimitiveType.Int, - Token.Variables.Parameter("x"), - Token.Punctuation.CloseParen, - Token.Operators.Arrow, - Token.Punctuation.OpenBrace, - Token.Punctuation.CloseBrace, - Token.Punctuation.Semicolon - ]); - }); - - it("lambda expression with 
multiple typed parameters (assignment)", () => { - const input = Input.InMethod(`Action a = (int x, int y) => { };`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Type("Action"), - Token.Punctuation.TypeParameters.Begin, - Token.PrimitiveType.Int, - Token.Punctuation.Comma, - Token.PrimitiveType.Int, - Token.Punctuation.TypeParameters.End, - Token.Variables.Local("a"), - Token.Operators.Assignment, - Token.Punctuation.OpenParen, - Token.PrimitiveType.Int, - Token.Variables.Parameter("x"), - Token.Punctuation.Comma, - Token.PrimitiveType.Int, - Token.Variables.Parameter("y"), - Token.Punctuation.CloseParen, - Token.Operators.Arrow, - Token.Punctuation.OpenBrace, - Token.Punctuation.CloseBrace, - Token.Punctuation.Semicolon - ]); - }); - - it("async lambda expression with multiple typed parameters (assignment)", () => { - const input = Input.InMethod(`Func a = async (int x, int y) => { };`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Type("Func"), - Token.Punctuation.TypeParameters.Begin, - Token.PrimitiveType.Int, - Token.Punctuation.Comma, - Token.PrimitiveType.Int, - Token.Punctuation.Comma, - Token.Type("Task"), - Token.Punctuation.TypeParameters.End, - Token.Variables.Local("a"), - Token.Operators.Assignment, - Token.Keywords.Modifiers.Async, - Token.Punctuation.OpenParen, - Token.PrimitiveType.Int, - Token.Variables.Parameter("x"), - Token.Punctuation.Comma, - Token.PrimitiveType.Int, - Token.Variables.Parameter("y"), - Token.Punctuation.CloseParen, - Token.Operators.Arrow, - Token.Punctuation.OpenBrace, - Token.Punctuation.CloseBrace, - Token.Punctuation.Semicolon - ]); - }); - - it("lambda expression with no parameters (passed as argument)", () => { - const input = Input.InMethod(`M(() => { });`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Identifiers.MethodName("M"), - Token.Punctuation.OpenParen, - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - 
Token.Operators.Arrow, - Token.Punctuation.OpenBrace, - Token.Punctuation.CloseBrace, - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon - ]); - }); - - it("async lambda expression with no parameters (passed as argument)", () => { - const input = Input.InMethod(`M(async () => { });`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Identifiers.MethodName("M"), - Token.Punctuation.OpenParen, - Token.Keywords.Modifiers.Async, - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - Token.Operators.Arrow, - Token.Punctuation.OpenBrace, - Token.Punctuation.CloseBrace, - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon - ]); - }); - - it("lambda expression with single parameter (passed as argument)", () => { - const input = Input.InMethod(`M(x => { });`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Identifiers.MethodName("M"), - Token.Punctuation.OpenParen, - Token.Variables.Parameter("x"), - Token.Operators.Arrow, - Token.Punctuation.OpenBrace, - Token.Punctuation.CloseBrace, - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon - ]); - }); - - it("async lambda expression with single parameter (passed as argument)", () => { - const input = Input.InMethod(`M(async x => { });`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Identifiers.MethodName("M"), - Token.Punctuation.OpenParen, - Token.Keywords.Modifiers.Async, - Token.Variables.Parameter("x"), - Token.Operators.Arrow, - Token.Punctuation.OpenBrace, - Token.Punctuation.CloseBrace, - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon - ]); - }); - - it("lambda expression with single typed parameter (passed as argument)", () => { - const input = Input.InMethod(`M((int x) => { });`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Identifiers.MethodName("M"), - Token.Punctuation.OpenParen, - Token.Punctuation.OpenParen, - Token.PrimitiveType.Int, - 
Token.Variables.Parameter("x"), - Token.Punctuation.CloseParen, - Token.Operators.Arrow, - Token.Punctuation.OpenBrace, - Token.Punctuation.CloseBrace, - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon - ]); - }); - - it("async lambda expression with single typed parameter (passed as argument)", () => { - const input = Input.InMethod(`M(async (int x) => { });`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Identifiers.MethodName("M"), - Token.Punctuation.OpenParen, - Token.Keywords.Modifiers.Async, - Token.Punctuation.OpenParen, - Token.PrimitiveType.Int, - Token.Variables.Parameter("x"), - Token.Punctuation.CloseParen, - Token.Operators.Arrow, - Token.Punctuation.OpenBrace, - Token.Punctuation.CloseBrace, - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon - ]); - }); - - it("lambda expression with multiple typed parameters (passed as argument)", () => { - const input = Input.InMethod(`M((int x, int y) => { });`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Identifiers.MethodName("M"), - Token.Punctuation.OpenParen, - Token.Punctuation.OpenParen, - Token.PrimitiveType.Int, - Token.Variables.Parameter("x"), - Token.Punctuation.Comma, - Token.PrimitiveType.Int, - Token.Variables.Parameter("y"), - Token.Punctuation.CloseParen, - Token.Operators.Arrow, - Token.Punctuation.OpenBrace, - Token.Punctuation.CloseBrace, - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon - ]); - }); - - it("async lambda expression with multiple typed parameters (passed as argument)", () => { - const input = Input.InMethod(`M(async (int x, int y) => { });`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Identifiers.MethodName("M"), - Token.Punctuation.OpenParen, - Token.Keywords.Modifiers.Async, - Token.Punctuation.OpenParen, - Token.PrimitiveType.Int, - Token.Variables.Parameter("x"), - Token.Punctuation.Comma, - Token.PrimitiveType.Int, - Token.Variables.Parameter("y"), - 
Token.Punctuation.CloseParen, - Token.Operators.Arrow, - Token.Punctuation.OpenBrace, - Token.Punctuation.CloseBrace, - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon - ]); - }); - }); -}); \ No newline at end of file diff --git a/test/syntaxes/boolean-literals.test.syntax.ts b/test/syntaxes/boolean-literals.test.syntax.ts deleted file mode 100644 index 9c9363c03d..0000000000 --- a/test/syntaxes/boolean-literals.test.syntax.ts +++ /dev/null @@ -1,39 +0,0 @@ -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ - -import { should } from 'chai'; -import { tokenize, Input, Token } from './utils/tokenize'; - -describe("Grammar", () => { - before(() => should()); - - describe("Literals - boolean", () => { - it("true", () => { - - const input = Input.InClass(`bool x = true;`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.PrimitiveType.Bool, - Token.Identifiers.FieldName("x"), - Token.Operators.Assignment, - Token.Literals.Boolean.True, - Token.Punctuation.Semicolon]); - }); - - it("false", () => { - - const input = Input.InClass(`bool x = false;`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.PrimitiveType.Bool, - Token.Identifiers.FieldName("x"), - Token.Operators.Assignment, - Token.Literals.Boolean.False, - Token.Punctuation.Semicolon]); - }); - }); -}); \ No newline at end of file diff --git a/test/syntaxes/cast-expressions.test.syntax.ts b/test/syntaxes/cast-expressions.test.syntax.ts deleted file mode 100644 index 08e3c2b050..0000000000 --- a/test/syntaxes/cast-expressions.test.syntax.ts +++ /dev/null @@ -1,83 +0,0 @@ 
-/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ - -import { should } from 'chai'; -import { tokenize, Input, Token } from './utils/tokenize'; - -describe("Grammar", () => { - before(() => should()); - - describe("Cast expressions", () => { - it("cast to built-in type in assignment", () => { - const input = Input.InMethod(`var o = (object)42;`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Var, - Token.Variables.Local("o"), - Token.Operators.Assignment, - Token.Punctuation.OpenParen, - Token.PrimitiveType.Object, - Token.Punctuation.CloseParen, - Token.Literals.Numeric.Decimal("42"), - Token.Punctuation.Semicolon - ]); - }); - - it("cast to generic type in assignment", () => { - const input = Input.InMethod(`var o = (C)42;`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Var, - Token.Variables.Local("o"), - Token.Operators.Assignment, - Token.Punctuation.OpenParen, - Token.Type("C"), - Token.Punctuation.TypeParameters.Begin, - Token.PrimitiveType.Int, - Token.Punctuation.TypeParameters.End, - Token.Punctuation.CloseParen, - Token.Literals.Numeric.Decimal("42"), - Token.Punctuation.Semicolon - ]); - }); - - it("passed to invocation", () => { - const input = Input.InMethod(`M((int)42);`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Identifiers.MethodName("M"), - Token.Punctuation.OpenParen, - Token.Punctuation.OpenParen, - Token.PrimitiveType.Int, - Token.Punctuation.CloseParen, - Token.Literals.Numeric.Decimal("42"), - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon - ]); - }); - - it("chained cast passed to invocation", () => { - const input = 
Input.InMethod(`M((int)(object)42);`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Identifiers.MethodName("M"), - Token.Punctuation.OpenParen, - Token.Punctuation.OpenParen, - Token.PrimitiveType.Int, - Token.Punctuation.CloseParen, - Token.Punctuation.OpenParen, - Token.PrimitiveType.Object, - Token.Punctuation.CloseParen, - Token.Literals.Numeric.Decimal("42"), - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon - ]); - }); - }); -}); \ No newline at end of file diff --git a/test/syntaxes/char-literals.test.syntax.ts b/test/syntaxes/char-literals.test.syntax.ts deleted file mode 100644 index 073b910e37..0000000000 --- a/test/syntaxes/char-literals.test.syntax.ts +++ /dev/null @@ -1,54 +0,0 @@ -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ - -import { should } from 'chai'; -import { tokenize, Input, Token } from './utils/tokenize'; - -describe("Grammar", () => { - before(() => should()); - - describe("Literals - char", () => { - it("empty", () => { - const input = Input.InMethod(`var x = '';`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Var, - Token.Variables.Local("x"), - Token.Operators.Assignment, - Token.Punctuation.Char.Begin, - Token.Punctuation.Char.End, - Token.Punctuation.Semicolon]); - }); - - it("letter", () => { - const input = Input.InMethod(`var x = 'a';`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Var, - Token.Variables.Local("x"), - Token.Operators.Assignment, - Token.Punctuation.Char.Begin, - Token.Literals.Char("a"), - Token.Punctuation.Char.End, - Token.Punctuation.Semicolon]); - }); - - it("escaped single quote", () => { - 
const input = Input.InMethod(`var x = '\\'';`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Var, - Token.Variables.Local("x"), - Token.Operators.Assignment, - Token.Punctuation.Char.Begin, - Token.Literals.CharacterEscape("\\'"), - Token.Punctuation.Char.End, - Token.Punctuation.Semicolon]); - }); - }); -}); \ No newline at end of file diff --git a/test/syntaxes/checked-unchecked.test.syntax.ts b/test/syntaxes/checked-unchecked.test.syntax.ts deleted file mode 100644 index 3354dd790b..0000000000 --- a/test/syntaxes/checked-unchecked.test.syntax.ts +++ /dev/null @@ -1,92 +0,0 @@ -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ - -import { should } from 'chai'; -import { tokenize, Input, Token } from './utils/tokenize'; - -describe("Grammar", () => { - before(() => should()); - - describe("Checked/Unchecked", () => { - it("checked expression", () => { - const input = Input.InMethod(`int x = checked(42);`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.PrimitiveType.Int, - Token.Variables.Local("x"), - Token.Operators.Assignment, - Token.Keywords.Checked, - Token.Punctuation.OpenParen, - Token.Literals.Numeric.Decimal("42"), - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon - ]); - }); - - it("unchecked expression", () => { - const input = Input.InMethod(`int x = unchecked(42);`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.PrimitiveType.Int, - Token.Variables.Local("x"), - Token.Operators.Assignment, - Token.Keywords.Unchecked, - Token.Punctuation.OpenParen, - Token.Literals.Numeric.Decimal("42"), - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon - ]); 
- }); - - it("checked expression inside checked statement", () => { - const input = ` -class C -{ - void M1() - { - checked - { - checked(++i); - } - } - void M2() { } -} -`; - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Class, - Token.Identifiers.ClassName("C"), - Token.Punctuation.OpenBrace, - Token.PrimitiveType.Void, - Token.Identifiers.MethodName("M1"), - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - Token.Punctuation.OpenBrace, - - Token.Keywords.Checked, - Token.Punctuation.OpenBrace, - Token.Keywords.Checked, - Token.Punctuation.OpenParen, - Token.Operators.Increment, - Token.Variables.ReadWrite("i"), - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon, - Token.Punctuation.CloseBrace, - - Token.Punctuation.CloseBrace, - Token.PrimitiveType.Void, - Token.Identifiers.MethodName("M2"), - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - Token.Punctuation.OpenBrace, - Token.Punctuation.CloseBrace, - Token.Punctuation.CloseBrace - ]); - }); - }); -}); \ No newline at end of file diff --git a/test/syntaxes/element-access-expressions.test.syntax.ts b/test/syntaxes/element-access-expressions.test.syntax.ts deleted file mode 100644 index f901f06cb9..0000000000 --- a/test/syntaxes/element-access-expressions.test.syntax.ts +++ /dev/null @@ -1,222 +0,0 @@ -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. 
- *--------------------------------------------------------------------------------------------*/ - -import { should } from 'chai'; -import { tokenize, Input, Token } from './utils/tokenize'; - -describe("Grammar", () => { - before(() => should()); - - describe("Element access expressions", () => { - it("no arguments", () => { - const input = Input.InMethod(`var o = P[];`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Var, - Token.Variables.Local("o"), - Token.Operators.Assignment, - Token.Variables.Property("P"), - Token.Punctuation.OpenBracket, - Token.Punctuation.CloseBracket, - Token.Punctuation.Semicolon - ]); - }); - - it("one argument", () => { - const input = Input.InMethod(`var o = P[42];`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Var, - Token.Variables.Local("o"), - Token.Operators.Assignment, - Token.Variables.Property("P"), - Token.Punctuation.OpenBracket, - Token.Literals.Numeric.Decimal("42"), - Token.Punctuation.CloseBracket, - Token.Punctuation.Semicolon - ]); - }); - - it("two arguments", () => { - const input = Input.InMethod(`var o = P[19, 23];`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Var, - Token.Variables.Local("o"), - Token.Operators.Assignment, - Token.Variables.Property("P"), - Token.Punctuation.OpenBracket, - Token.Literals.Numeric.Decimal("19"), - Token.Punctuation.Comma, - Token.Literals.Numeric.Decimal("23"), - Token.Punctuation.CloseBracket, - Token.Punctuation.Semicolon - ]); - }); - - it("two named arguments", () => { - const input = Input.InMethod(`var o = P[x: 19, y: 23];`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Var, - Token.Variables.Local("o"), - Token.Operators.Assignment, - Token.Variables.Property("P"), - Token.Punctuation.OpenBracket, - Token.Variables.Parameter("x"), - Token.Punctuation.Colon, - Token.Literals.Numeric.Decimal("19"), - Token.Punctuation.Comma, - 
Token.Variables.Parameter("y"), - Token.Punctuation.Colon, - Token.Literals.Numeric.Decimal("23"), - Token.Punctuation.CloseBracket, - Token.Punctuation.Semicolon - ]); - }); - - it("ref argument", () => { - const input = Input.InMethod(`var o = P[ref x];`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Var, - Token.Variables.Local("o"), - Token.Operators.Assignment, - Token.Variables.Property("P"), - Token.Punctuation.OpenBracket, - Token.Keywords.Modifiers.Ref, - Token.Variables.ReadWrite("x"), - Token.Punctuation.CloseBracket, - Token.Punctuation.Semicolon - ]); - }); - - it("out argument", () => { - const input = Input.InMethod(`var o = P[out x];`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Var, - Token.Variables.Local("o"), - Token.Operators.Assignment, - Token.Variables.Property("P"), - Token.Punctuation.OpenBracket, - Token.Keywords.Modifiers.Out, - Token.Variables.ReadWrite("x"), - Token.Punctuation.CloseBracket, - Token.Punctuation.Semicolon - ]); - }); - - it("member of generic with no arguments", () => { - const input = Input.InMethod(`var o = C.P[];`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Var, - Token.Variables.Local("o"), - Token.Operators.Assignment, - Token.Variables.Object("C"), - Token.Punctuation.TypeParameters.Begin, - Token.PrimitiveType.Int, - Token.Punctuation.TypeParameters.End, - Token.Punctuation.Accessor, - Token.Variables.Property("P"), - Token.Punctuation.OpenBracket, - Token.Punctuation.CloseBracket, - Token.Punctuation.Semicolon - ]); - }); - - it("member of qualified generic with no arguments", () => { - const input = Input.InMethod(`var o = N.C.P[];`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Var, - Token.Variables.Local("o"), - Token.Operators.Assignment, - Token.Variables.Object("N"), - Token.Punctuation.Accessor, - Token.Variables.Object("C"), - 
Token.Punctuation.TypeParameters.Begin, - Token.PrimitiveType.Int, - Token.Punctuation.TypeParameters.End, - Token.Punctuation.Accessor, - Token.Variables.Property("P"), - Token.Punctuation.OpenBracket, - Token.Punctuation.CloseBracket, - Token.Punctuation.Semicolon - ]); - }); - - it("read/write array element", () => { - const input = Input.InMethod(` -object[] a1 = {(null), (this.a), c}; -a1[1] = ((this.a)); a1[2] = (c); a1[1] = (i); -`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.PrimitiveType.Object, - Token.Punctuation.OpenBracket, - Token.Punctuation.CloseBracket, - Token.Variables.Local("a1"), - Token.Operators.Assignment, - Token.Punctuation.OpenBrace, - Token.Punctuation.OpenParen, - Token.Literals.Null, - Token.Punctuation.CloseParen, - Token.Punctuation.Comma, - Token.Punctuation.OpenParen, - Token.Keywords.This, - Token.Punctuation.Accessor, - Token.Variables.Property("a"), - Token.Punctuation.CloseParen, - Token.Punctuation.Comma, - Token.Variables.ReadWrite("c"), - Token.Punctuation.CloseBrace, - Token.Punctuation.Semicolon, - - Token.Variables.Property("a1"), - Token.Punctuation.OpenBracket, - Token.Literals.Numeric.Decimal("1"), - Token.Punctuation.CloseBracket, - Token.Operators.Assignment, - Token.Punctuation.OpenParen, - Token.Punctuation.OpenParen, - Token.Keywords.This, - Token.Punctuation.Accessor, - Token.Variables.Property("a"), - Token.Punctuation.CloseParen, - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon, - Token.Variables.Property("a1"), - Token.Punctuation.OpenBracket, - Token.Literals.Numeric.Decimal("2"), - Token.Punctuation.CloseBracket, - Token.Operators.Assignment, - Token.Punctuation.OpenParen, - Token.Variables.ReadWrite("c"), - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon, - Token.Variables.Property("a1"), - Token.Punctuation.OpenBracket, - Token.Literals.Numeric.Decimal("1"), - Token.Punctuation.CloseBracket, - Token.Operators.Assignment, - Token.Punctuation.OpenParen, 
- Token.Variables.ReadWrite("i"), - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon, - ]); - }); - }); -}); \ No newline at end of file diff --git a/test/syntaxes/expressions.test.syntax.ts b/test/syntaxes/expressions.test.syntax.ts index 85ad9f0bde..e5aa679d74 100644 --- a/test/syntaxes/expressions.test.syntax.ts +++ b/test/syntaxes/expressions.test.syntax.ts @@ -10,90 +10,1495 @@ describe("Grammar", () => { before(() => should()); describe("Expressions", () => { - it("array creation expression passed as argument", () => { - const input = Input.InMethod(`c.abst(ref s, new int[] {1, i, i});`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Variables.Object("c"), - Token.Punctuation.Accessor, - Token.Identifiers.MethodName("abst"), - Token.Punctuation.OpenParen, - Token.Keywords.Modifiers.Ref, - Token.Variables.ReadWrite("s"), - Token.Punctuation.Comma, - Token.Keywords.New, - Token.PrimitiveType.Int, - Token.Punctuation.OpenBracket, - Token.Punctuation.CloseBracket, - Token.Punctuation.OpenBrace, - Token.Literals.Numeric.Decimal("1"), - Token.Punctuation.Comma, - Token.Variables.ReadWrite("i"), - Token.Punctuation.Comma, - Token.Variables.ReadWrite("i"), - Token.Punctuation.CloseBrace, - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon - ]); + describe("Anonymous Methods", () => { + it("lambda expression with no parameters (assignment)", () => { + const input = Input.InMethod(`Action a = () => { };`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("Action"), + Token.Variables.Local("a"), + Token.Operators.Assignment, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Operators.Arrow, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.Semicolon + ]); + }); + + it("async lambda expression with no parameters (assignment)", () => { + const input = Input.InMethod(`Func a = async () => { };`); + const tokens = tokenize(input); + + 
tokens.should.deep.equal([ + Token.Type("Func"), + Token.Punctuation.TypeParameters.Begin, + Token.Type("Task"), + Token.Punctuation.TypeParameters.End, + Token.Variables.Local("a"), + Token.Operators.Assignment, + Token.Keywords.Modifiers.Async, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Operators.Arrow, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.Semicolon + ]); + }); + + it("lambda expression with single parameter (assignment)", () => { + const input = Input.InMethod(`Action a = x => { };`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("Action"), + Token.Punctuation.TypeParameters.Begin, + Token.PrimitiveType.Int, + Token.Punctuation.TypeParameters.End, + Token.Variables.Local("a"), + Token.Operators.Assignment, + Token.Variables.Parameter("x"), + Token.Operators.Arrow, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.Semicolon + ]); + }); + + it("async lambda expression with single parameter (assignment)", () => { + const input = Input.InMethod(`Func a = async x => { };`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("Func"), + Token.Punctuation.TypeParameters.Begin, + Token.PrimitiveType.Int, + Token.Punctuation.Comma, + Token.Type("Task"), + Token.Punctuation.TypeParameters.End, + Token.Variables.Local("a"), + Token.Operators.Assignment, + Token.Keywords.Modifiers.Async, + Token.Variables.Parameter("x"), + Token.Operators.Arrow, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.Semicolon + ]); + }); + + it("lambda expression with single typed parameter (assignment)", () => { + const input = Input.InMethod(`Action a = (int x) => { };`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("Action"), + Token.Punctuation.TypeParameters.Begin, + Token.PrimitiveType.Int, + Token.Punctuation.TypeParameters.End, + Token.Variables.Local("a"), + 
Token.Operators.Assignment, + Token.Punctuation.OpenParen, + Token.PrimitiveType.Int, + Token.Variables.Parameter("x"), + Token.Punctuation.CloseParen, + Token.Operators.Arrow, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.Semicolon + ]); + }); + + it("async lambda expression with single typed parameter (assignment)", () => { + const input = Input.InMethod(`Func a = async (int x) => { };`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("Func"), + Token.Punctuation.TypeParameters.Begin, + Token.PrimitiveType.Int, + Token.Punctuation.Comma, + Token.Type("Task"), + Token.Punctuation.TypeParameters.End, + Token.Variables.Local("a"), + Token.Operators.Assignment, + Token.Keywords.Modifiers.Async, + Token.Punctuation.OpenParen, + Token.PrimitiveType.Int, + Token.Variables.Parameter("x"), + Token.Punctuation.CloseParen, + Token.Operators.Arrow, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.Semicolon + ]); + }); + + it("lambda expression with multiple typed parameters (assignment)", () => { + const input = Input.InMethod(`Action a = (int x, int y) => { };`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("Action"), + Token.Punctuation.TypeParameters.Begin, + Token.PrimitiveType.Int, + Token.Punctuation.Comma, + Token.PrimitiveType.Int, + Token.Punctuation.TypeParameters.End, + Token.Variables.Local("a"), + Token.Operators.Assignment, + Token.Punctuation.OpenParen, + Token.PrimitiveType.Int, + Token.Variables.Parameter("x"), + Token.Punctuation.Comma, + Token.PrimitiveType.Int, + Token.Variables.Parameter("y"), + Token.Punctuation.CloseParen, + Token.Operators.Arrow, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.Semicolon + ]); + }); + + it("async lambda expression with multiple typed parameters (assignment)", () => { + const input = Input.InMethod(`Func a = async (int x, int y) => { };`); + const 
tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("Func"), + Token.Punctuation.TypeParameters.Begin, + Token.PrimitiveType.Int, + Token.Punctuation.Comma, + Token.PrimitiveType.Int, + Token.Punctuation.Comma, + Token.Type("Task"), + Token.Punctuation.TypeParameters.End, + Token.Variables.Local("a"), + Token.Operators.Assignment, + Token.Keywords.Modifiers.Async, + Token.Punctuation.OpenParen, + Token.PrimitiveType.Int, + Token.Variables.Parameter("x"), + Token.Punctuation.Comma, + Token.PrimitiveType.Int, + Token.Variables.Parameter("y"), + Token.Punctuation.CloseParen, + Token.Operators.Arrow, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.Semicolon + ]); + }); + + it("lambda expression with no parameters (passed as argument)", () => { + const input = Input.InMethod(`M(() => { });`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("M"), + Token.Punctuation.OpenParen, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Operators.Arrow, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("async lambda expression with no parameters (passed as argument)", () => { + const input = Input.InMethod(`M(async () => { });`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("M"), + Token.Punctuation.OpenParen, + Token.Keywords.Modifiers.Async, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Operators.Arrow, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("lambda expression with single parameter (passed as argument)", () => { + const input = Input.InMethod(`M(x => { });`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("M"), + Token.Punctuation.OpenParen, 
+ Token.Variables.Parameter("x"), + Token.Operators.Arrow, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("async lambda expression with single parameter (passed as argument)", () => { + const input = Input.InMethod(`M(async x => { });`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("M"), + Token.Punctuation.OpenParen, + Token.Keywords.Modifiers.Async, + Token.Variables.Parameter("x"), + Token.Operators.Arrow, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("lambda expression with single typed parameter (passed as argument)", () => { + const input = Input.InMethod(`M((int x) => { });`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("M"), + Token.Punctuation.OpenParen, + Token.Punctuation.OpenParen, + Token.PrimitiveType.Int, + Token.Variables.Parameter("x"), + Token.Punctuation.CloseParen, + Token.Operators.Arrow, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("async lambda expression with single typed parameter (passed as argument)", () => { + const input = Input.InMethod(`M(async (int x) => { });`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("M"), + Token.Punctuation.OpenParen, + Token.Keywords.Modifiers.Async, + Token.Punctuation.OpenParen, + Token.PrimitiveType.Int, + Token.Variables.Parameter("x"), + Token.Punctuation.CloseParen, + Token.Operators.Arrow, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("lambda expression with multiple typed parameters (passed as argument)", () => { + const input = Input.InMethod(`M((int x, int y) => 
{ });`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("M"), + Token.Punctuation.OpenParen, + Token.Punctuation.OpenParen, + Token.PrimitiveType.Int, + Token.Variables.Parameter("x"), + Token.Punctuation.Comma, + Token.PrimitiveType.Int, + Token.Variables.Parameter("y"), + Token.Punctuation.CloseParen, + Token.Operators.Arrow, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("async lambda expression with multiple typed parameters (passed as argument)", () => { + const input = Input.InMethod(`M(async (int x, int y) => { });`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("M"), + Token.Punctuation.OpenParen, + Token.Keywords.Modifiers.Async, + Token.Punctuation.OpenParen, + Token.PrimitiveType.Int, + Token.Variables.Parameter("x"), + Token.Punctuation.Comma, + Token.PrimitiveType.Int, + Token.Variables.Parameter("y"), + Token.Punctuation.CloseParen, + Token.Operators.Arrow, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + }); + + describe("Arithmetic", () => { + it("mixed relational and arithmetic operators", () => { + const input = Input.InMethod(`b = this.i != 1 + (2 - 3);`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Variables.ReadWrite("b"), + Token.Operators.Assignment, + Token.Keywords.This, + Token.Punctuation.Accessor, + Token.Variables.Property("i"), + Token.Operators.Relational.NotEqual, + Token.Literals.Numeric.Decimal("1"), + Token.Operators.Arithmetic.Addition, + Token.Punctuation.OpenParen, + Token.Literals.Numeric.Decimal("2"), + Token.Operators.Arithmetic.Subtraction, + Token.Literals.Numeric.Decimal("3"), + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + }); + + describe("Array Creation", () => { + it("array 
creation expression passed as argument", () => { + const input = Input.InMethod(`c.abst(ref s, new int[] {1, i, i});`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Variables.Object("c"), + Token.Punctuation.Accessor, + Token.Identifiers.MethodName("abst"), + Token.Punctuation.OpenParen, + Token.Keywords.Modifiers.Ref, + Token.Variables.ReadWrite("s"), + Token.Punctuation.Comma, + Token.Keywords.New, + Token.PrimitiveType.Int, + Token.Punctuation.OpenBracket, + Token.Punctuation.CloseBracket, + Token.Punctuation.OpenBrace, + Token.Literals.Numeric.Decimal("1"), + Token.Punctuation.Comma, + Token.Variables.ReadWrite("i"), + Token.Punctuation.Comma, + Token.Variables.ReadWrite("i"), + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + }); + + describe("Casts", () => { + it("cast to built-in type in assignment", () => { + const input = Input.InMethod(`var o = (object)42;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Variables.Local("o"), + Token.Operators.Assignment, + Token.Punctuation.OpenParen, + Token.PrimitiveType.Object, + Token.Punctuation.CloseParen, + Token.Literals.Numeric.Decimal("42"), + Token.Punctuation.Semicolon + ]); + }); + + it("cast to generic type in assignment", () => { + const input = Input.InMethod(`var o = (C)42;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Variables.Local("o"), + Token.Operators.Assignment, + Token.Punctuation.OpenParen, + Token.Type("C"), + Token.Punctuation.TypeParameters.Begin, + Token.PrimitiveType.Int, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.CloseParen, + Token.Literals.Numeric.Decimal("42"), + Token.Punctuation.Semicolon + ]); + }); + + it("passed to invocation", () => { + const input = Input.InMethod(`M((int)42);`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("M"), 
+ Token.Punctuation.OpenParen, + Token.Punctuation.OpenParen, + Token.PrimitiveType.Int, + Token.Punctuation.CloseParen, + Token.Literals.Numeric.Decimal("42"), + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("chained cast passed to invocation", () => { + const input = Input.InMethod(`M((int)(object)42);`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("M"), + Token.Punctuation.OpenParen, + Token.Punctuation.OpenParen, + Token.PrimitiveType.Int, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenParen, + Token.PrimitiveType.Object, + Token.Punctuation.CloseParen, + Token.Literals.Numeric.Decimal("42"), + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + }); + + describe("Checked/Unchecked", () => { + it("checked expression", () => { + const input = Input.InMethod(`int x = checked(42);`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.PrimitiveType.Int, + Token.Variables.Local("x"), + Token.Operators.Assignment, + Token.Keywords.Checked, + Token.Punctuation.OpenParen, + Token.Literals.Numeric.Decimal("42"), + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("unchecked expression", () => { + const input = Input.InMethod(`int x = unchecked(42);`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.PrimitiveType.Int, + Token.Variables.Local("x"), + Token.Operators.Assignment, + Token.Keywords.Unchecked, + Token.Punctuation.OpenParen, + Token.Literals.Numeric.Decimal("42"), + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("checked expression inside checked statement", () => { + const input = ` +class C +{ + void M1() + { + checked + { + checked(++i); + } + } + void M2() { } +}`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Class, + Token.Identifiers.ClassName("C"), + Token.Punctuation.OpenBrace, + 
Token.PrimitiveType.Void, + Token.Identifiers.MethodName("M1"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + + Token.Keywords.Checked, + Token.Punctuation.OpenBrace, + Token.Keywords.Checked, + Token.Punctuation.OpenParen, + Token.Operators.Increment, + Token.Variables.ReadWrite("i"), + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace, + + Token.Punctuation.CloseBrace, + Token.PrimitiveType.Void, + Token.Identifiers.MethodName("M2"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseBrace + ]); + }); + }); + + describe("Element Access", () => { + it("no arguments", () => { + const input = Input.InMethod(`var o = P[];`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Variables.Local("o"), + Token.Operators.Assignment, + Token.Variables.Property("P"), + Token.Punctuation.OpenBracket, + Token.Punctuation.CloseBracket, + Token.Punctuation.Semicolon + ]); + }); + + it("one argument", () => { + const input = Input.InMethod(`var o = P[42];`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Variables.Local("o"), + Token.Operators.Assignment, + Token.Variables.Property("P"), + Token.Punctuation.OpenBracket, + Token.Literals.Numeric.Decimal("42"), + Token.Punctuation.CloseBracket, + Token.Punctuation.Semicolon + ]); + }); + + it("two arguments", () => { + const input = Input.InMethod(`var o = P[19, 23];`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Variables.Local("o"), + Token.Operators.Assignment, + Token.Variables.Property("P"), + Token.Punctuation.OpenBracket, + Token.Literals.Numeric.Decimal("19"), + Token.Punctuation.Comma, + Token.Literals.Numeric.Decimal("23"), + Token.Punctuation.CloseBracket, + Token.Punctuation.Semicolon + ]); + 
}); + + it("two named arguments", () => { + const input = Input.InMethod(`var o = P[x: 19, y: 23];`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Variables.Local("o"), + Token.Operators.Assignment, + Token.Variables.Property("P"), + Token.Punctuation.OpenBracket, + Token.Variables.Parameter("x"), + Token.Punctuation.Colon, + Token.Literals.Numeric.Decimal("19"), + Token.Punctuation.Comma, + Token.Variables.Parameter("y"), + Token.Punctuation.Colon, + Token.Literals.Numeric.Decimal("23"), + Token.Punctuation.CloseBracket, + Token.Punctuation.Semicolon + ]); + }); + + it("ref argument", () => { + const input = Input.InMethod(`var o = P[ref x];`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Variables.Local("o"), + Token.Operators.Assignment, + Token.Variables.Property("P"), + Token.Punctuation.OpenBracket, + Token.Keywords.Modifiers.Ref, + Token.Variables.ReadWrite("x"), + Token.Punctuation.CloseBracket, + Token.Punctuation.Semicolon + ]); + }); + + it("out argument", () => { + const input = Input.InMethod(`var o = P[out x];`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Variables.Local("o"), + Token.Operators.Assignment, + Token.Variables.Property("P"), + Token.Punctuation.OpenBracket, + Token.Keywords.Modifiers.Out, + Token.Variables.ReadWrite("x"), + Token.Punctuation.CloseBracket, + Token.Punctuation.Semicolon + ]); + }); + + it("member of generic with no arguments", () => { + const input = Input.InMethod(`var o = C.P[];`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Variables.Local("o"), + Token.Operators.Assignment, + Token.Variables.Object("C"), + Token.Punctuation.TypeParameters.Begin, + Token.PrimitiveType.Int, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.Accessor, + Token.Variables.Property("P"), + Token.Punctuation.OpenBracket, + 
Token.Punctuation.CloseBracket, + Token.Punctuation.Semicolon + ]); + }); + + it("member of qualified generic with no arguments", () => { + const input = Input.InMethod(`var o = N.C.P[];`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Variables.Local("o"), + Token.Operators.Assignment, + Token.Variables.Object("N"), + Token.Punctuation.Accessor, + Token.Variables.Object("C"), + Token.Punctuation.TypeParameters.Begin, + Token.PrimitiveType.Int, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.Accessor, + Token.Variables.Property("P"), + Token.Punctuation.OpenBracket, + Token.Punctuation.CloseBracket, + Token.Punctuation.Semicolon + ]); + }); + + it("read/write array element", () => { + const input = Input.InMethod(` +object[] a1 = {(null), (this.a), c}; +a1[1] = ((this.a)); a1[2] = (c); a1[1] = (i); +`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.PrimitiveType.Object, + Token.Punctuation.OpenBracket, + Token.Punctuation.CloseBracket, + Token.Variables.Local("a1"), + Token.Operators.Assignment, + Token.Punctuation.OpenBrace, + Token.Punctuation.OpenParen, + Token.Literals.Null, + Token.Punctuation.CloseParen, + Token.Punctuation.Comma, + Token.Punctuation.OpenParen, + Token.Keywords.This, + Token.Punctuation.Accessor, + Token.Variables.Property("a"), + Token.Punctuation.CloseParen, + Token.Punctuation.Comma, + Token.Variables.ReadWrite("c"), + Token.Punctuation.CloseBrace, + Token.Punctuation.Semicolon, + + Token.Variables.Property("a1"), + Token.Punctuation.OpenBracket, + Token.Literals.Numeric.Decimal("1"), + Token.Punctuation.CloseBracket, + Token.Operators.Assignment, + Token.Punctuation.OpenParen, + Token.Punctuation.OpenParen, + Token.Keywords.This, + Token.Punctuation.Accessor, + Token.Variables.Property("a"), + Token.Punctuation.CloseParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + Token.Variables.Property("a1"), + Token.Punctuation.OpenBracket, + 
Token.Literals.Numeric.Decimal("2"), + Token.Punctuation.CloseBracket, + Token.Operators.Assignment, + Token.Punctuation.OpenParen, + Token.Variables.ReadWrite("c"), + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + Token.Variables.Property("a1"), + Token.Punctuation.OpenBracket, + Token.Literals.Numeric.Decimal("1"), + Token.Punctuation.CloseBracket, + Token.Operators.Assignment, + Token.Punctuation.OpenParen, + Token.Variables.ReadWrite("i"), + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + ]); + }); }); - it("arithmetic", () => { - const input = Input.InMethod(`b = this.i != 1 + (2 - 3);`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Variables.ReadWrite("b"), - Token.Operators.Assignment, - Token.Keywords.This, - Token.Punctuation.Accessor, - Token.Variables.Property("i"), - Token.Operators.Relational.NotEqual, - Token.Literals.Numeric.Decimal("1"), - Token.Operators.Arithmetic.Addition, - Token.Punctuation.OpenParen, - Token.Literals.Numeric.Decimal("2"), - Token.Operators.Arithmetic.Subtraction, - Token.Literals.Numeric.Decimal("3"), - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon - ]); + describe("Invocations", () => { + it("no arguments", () => { + const input = Input.InMethod(`M();`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("M"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("one argument", () => { + const input = Input.InMethod(`M(42);`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("M"), + Token.Punctuation.OpenParen, + Token.Literals.Numeric.Decimal("42"), + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("two arguments", () => { + const input = Input.InMethod(`M(19, 23);`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("M"), + 
Token.Punctuation.OpenParen, + Token.Literals.Numeric.Decimal("19"), + Token.Punctuation.Comma, + Token.Literals.Numeric.Decimal("23"), + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("two named arguments", () => { + const input = Input.InMethod(`M(x: 19, y: 23);`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("M"), + Token.Punctuation.OpenParen, + Token.Variables.Parameter("x"), + Token.Punctuation.Colon, + Token.Literals.Numeric.Decimal("19"), + Token.Punctuation.Comma, + Token.Variables.Parameter("y"), + Token.Punctuation.Colon, + Token.Literals.Numeric.Decimal("23"), + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("ref argument", () => { + const input = Input.InMethod(`M(ref x);`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("M"), + Token.Punctuation.OpenParen, + Token.Keywords.Modifiers.Ref, + Token.Variables.ReadWrite("x"), + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("out argument", () => { + const input = Input.InMethod(`M(out x);`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("M"), + Token.Punctuation.OpenParen, + Token.Keywords.Modifiers.Out, + Token.Variables.ReadWrite("x"), + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("generic with no arguments", () => { + const input = Input.InMethod(`M();`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("M"), + Token.Punctuation.TypeParameters.Begin, + Token.PrimitiveType.Int, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("nested generic with no arguments", () => { + const input = Input.InMethod(`M>();`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + 
Token.Identifiers.MethodName("M"), + Token.Punctuation.TypeParameters.Begin, + Token.Type("T"), + Token.Punctuation.TypeParameters.Begin, + Token.PrimitiveType.Int, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("double-nested generic with no arguments", () => { + const input = Input.InMethod(`M>>();`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("M"), + Token.Punctuation.TypeParameters.Begin, + Token.Type("T"), + Token.Punctuation.TypeParameters.Begin, + Token.Type("U"), + Token.Punctuation.TypeParameters.Begin, + Token.PrimitiveType.Int, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("member of generic with no arguments", () => { + const input = Input.InMethod(`C.M();`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Variables.Object("C"), + Token.Punctuation.TypeParameters.Begin, + Token.PrimitiveType.Int, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.Accessor, + Token.Identifiers.MethodName("M"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("member of qualified generic with no arguments", () => { + const input = Input.InMethod(`N.C.M();`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Variables.Object("N"), + Token.Punctuation.Accessor, + Token.Variables.Object("C"), + Token.Punctuation.TypeParameters.Begin, + Token.PrimitiveType.Int, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.Accessor, + Token.Identifiers.MethodName("M"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + 
it("store result of member of qualified generic with no arguments", () => { + const input = Input.InMethod(`var o = N.C.M();`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Variables.Local("o"), + Token.Operators.Assignment, + Token.Variables.Object("N"), + Token.Punctuation.Accessor, + Token.Variables.Object("C"), + Token.Punctuation.TypeParameters.Begin, + Token.PrimitiveType.Int, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.Accessor, + Token.Identifiers.MethodName("M"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("store result of qualified method with no arguments", () => { + const input = Input.InMethod(`var o = N.C.M();`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Variables.Local("o"), + Token.Operators.Assignment, + Token.Variables.Object("N"), + Token.Punctuation.Accessor, + Token.Variables.Property("C"), + Token.Punctuation.Accessor, + Token.Identifiers.MethodName("M"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("store result of this.qualified method with no arguments", () => { + const input = Input.InMethod(`var o = this.C.M();`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Variables.Local("o"), + Token.Operators.Assignment, + Token.Keywords.This, + Token.Punctuation.Accessor, + Token.Variables.Property("C"), + Token.Punctuation.Accessor, + Token.Identifiers.MethodName("M"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("store result of invocation with two named arguments", () => { + const input = Input.InMethod(`var o = M(x: 19, y: 23);`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Variables.Local("o"), + Token.Operators.Assignment, + 
Token.Identifiers.MethodName("M"), + Token.Punctuation.OpenParen, + Token.Variables.Parameter("x"), + Token.Punctuation.Colon, + Token.Literals.Numeric.Decimal("19"), + Token.Punctuation.Comma, + Token.Variables.Parameter("y"), + Token.Punctuation.Colon, + Token.Literals.Numeric.Decimal("23"), + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); }); - it("typeof", () => { - const input = Input.InMethod(`var t = typeof(List<>);`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Var, - Token.Variables.Local("t"), - Token.Operators.Assignment, - Token.Keywords.TypeOf, - Token.Punctuation.OpenParen, - Token.Type("List"), - Token.Punctuation.TypeParameters.Begin, - Token.Punctuation.TypeParameters.End, - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon - ]); + describe("Primary", () => { + it("default", () => { + const input = Input.InMethod(`var t = default(List<>);`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Variables.Local("t"), + Token.Operators.Assignment, + Token.Keywords.Default, + Token.Punctuation.OpenParen, + Token.Type("List"), + Token.Punctuation.TypeParameters.Begin, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("typeof", () => { + const input = Input.InMethod(`var t = typeof(List<>);`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Variables.Local("t"), + Token.Operators.Assignment, + Token.Keywords.TypeOf, + Token.Punctuation.OpenParen, + Token.Type("List"), + Token.Punctuation.TypeParameters.Begin, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); }); - it("default", () => { - const input = Input.InMethod(`var t = default(List<>);`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Var, - Token.Variables.Local("t"), - 
Token.Operators.Assignment, - Token.Keywords.Default, - Token.Punctuation.OpenParen, - Token.Type("List"), - Token.Punctuation.TypeParameters.Begin, - Token.Punctuation.TypeParameters.End, - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon - ]); + describe("Queries", () => { + it("from clause", () => { + const input = Input.InMethod(`var q = from n in numbers`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Variables.Local("q"), + Token.Operators.Assignment, + Token.Keywords.Queries.From, + Token.Identifiers.RangeVariableName("n"), + Token.Keywords.Queries.In, + Token.Variables.ReadWrite("numbers") + ]); + }); + + it("from clause with type", () => { + const input = Input.InMethod(`var q = from int n in numbers`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Variables.Local("q"), + Token.Operators.Assignment, + Token.Keywords.Queries.From, + Token.PrimitiveType.Int, + Token.Identifiers.RangeVariableName("n"), + Token.Keywords.Queries.In, + Token.Variables.ReadWrite("numbers") + ]); + }); + + it("from clause followed by from clause", () => { + const input = Input.InMethod(` +var q = from x in list1 + from y in list2 +`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Variables.Local("q"), + Token.Operators.Assignment, + Token.Keywords.Queries.From, + Token.Identifiers.RangeVariableName("x"), + Token.Keywords.Queries.In, + Token.Variables.ReadWrite("list1"), + Token.Keywords.Queries.From, + Token.Identifiers.RangeVariableName("y"), + Token.Keywords.Queries.In, + Token.Variables.ReadWrite("list2") + ]); + }); + + it("from clause, join clause", () => { + const input = Input.InMethod(` +var q = from c in customers + join o in orders on c.CustomerID equals o.CustomerID +`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Variables.Local("q"), + 
Token.Operators.Assignment, + Token.Keywords.Queries.From, + Token.Identifiers.RangeVariableName("c"), + Token.Keywords.Queries.In, + Token.Variables.ReadWrite("customers"), + Token.Keywords.Queries.Join, + Token.Identifiers.RangeVariableName("o"), + Token.Keywords.Queries.In, + Token.Variables.ReadWrite("orders"), + Token.Keywords.Queries.On, + Token.Variables.Object("c"), + Token.Punctuation.Accessor, + Token.Variables.Property("CustomerID"), + Token.Keywords.Queries.Equals, + Token.Variables.Object("o"), + Token.Punctuation.Accessor, + Token.Variables.Property("CustomerID") + ]); + }); + + it("from clause, join-into clause", () => { + const input = Input.InMethod(` +var q = from c in customers + join o in orders on c.CustomerID equals o.CustomerID into co +`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Variables.Local("q"), + Token.Operators.Assignment, + Token.Keywords.Queries.From, + Token.Identifiers.RangeVariableName("c"), + Token.Keywords.Queries.In, + Token.Variables.ReadWrite("customers"), + Token.Keywords.Queries.Join, + Token.Identifiers.RangeVariableName("o"), + Token.Keywords.Queries.In, + Token.Variables.ReadWrite("orders"), + Token.Keywords.Queries.On, + Token.Variables.Object("c"), + Token.Punctuation.Accessor, + Token.Variables.Property("CustomerID"), + Token.Keywords.Queries.Equals, + Token.Variables.Object("o"), + Token.Punctuation.Accessor, + Token.Variables.Property("CustomerID"), + Token.Keywords.Queries.Into, + Token.Identifiers.RangeVariableName("co") + ]); + }); + + it("from clause, orderby", () => { + const input = Input.InMethod(` +var q = from o in orders + orderby o.Customer.Name, o.Total +`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Variables.Local("q"), + Token.Operators.Assignment, + Token.Keywords.Queries.From, + Token.Identifiers.RangeVariableName("o"), + Token.Keywords.Queries.In, + Token.Variables.ReadWrite("orders"), + 
Token.Keywords.Queries.OrderBy, + Token.Variables.Object("o"), + Token.Punctuation.Accessor, + Token.Variables.Property("Customer"), + Token.Punctuation.Accessor, + Token.Variables.Property("Name"), + Token.Punctuation.Comma, + Token.Variables.Object("o"), + Token.Punctuation.Accessor, + Token.Variables.Property("Total") + ]); + }); + + it("from clause, orderby ascending", () => { + const input = Input.InMethod(` +var q = from o in orders + orderby o.Customer.Name ascending, o.Total +`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Variables.Local("q"), + Token.Operators.Assignment, + Token.Keywords.Queries.From, + Token.Identifiers.RangeVariableName("o"), + Token.Keywords.Queries.In, + Token.Variables.ReadWrite("orders"), + Token.Keywords.Queries.OrderBy, + Token.Variables.Object("o"), + Token.Punctuation.Accessor, + Token.Variables.Property("Customer"), + Token.Punctuation.Accessor, + Token.Variables.Property("Name"), + Token.Keywords.Queries.Ascending, + Token.Punctuation.Comma, + Token.Variables.Object("o"), + Token.Punctuation.Accessor, + Token.Variables.Property("Total") + ]); + }); + + it("from clause, orderby descending", () => { + const input = Input.InMethod(` +var q = from o in orders + orderby o.Customer.Name, o.Total descending +`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Variables.Local("q"), + Token.Operators.Assignment, + Token.Keywords.Queries.From, + Token.Identifiers.RangeVariableName("o"), + Token.Keywords.Queries.In, + Token.Variables.ReadWrite("orders"), + Token.Keywords.Queries.OrderBy, + Token.Variables.Object("o"), + Token.Punctuation.Accessor, + Token.Variables.Property("Customer"), + Token.Punctuation.Accessor, + Token.Variables.Property("Name"), + Token.Punctuation.Comma, + Token.Variables.Object("o"), + Token.Punctuation.Accessor, + Token.Variables.Property("Total"), + Token.Keywords.Queries.Descending + ]); + }); + + it("from and 
select", () => { + const input = Input.InMethod(` +var q = from n in numbers + select n;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Variables.Local("q"), + Token.Operators.Assignment, + Token.Keywords.Queries.From, + Token.Identifiers.RangeVariableName("n"), + Token.Keywords.Queries.In, + Token.Variables.ReadWrite("numbers"), + Token.Keywords.Queries.Select, + Token.Variables.ReadWrite("n"), + Token.Punctuation.Semicolon + ]); + }); + + it("from and select with complex expressions", () => { + const input = Input.InMethod(` +var q = from n in new[] { 1, 3, 5, 7, 9 } + select n % 4 * 6;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Variables.Local("q"), + Token.Operators.Assignment, + Token.Keywords.Queries.From, + Token.Identifiers.RangeVariableName("n"), + Token.Keywords.Queries.In, + Token.Keywords.New, + Token.Punctuation.OpenBracket, + Token.Punctuation.CloseBracket, + Token.Punctuation.OpenBrace, + Token.Literals.Numeric.Decimal("1"), + Token.Punctuation.Comma, + Token.Literals.Numeric.Decimal("3"), + Token.Punctuation.Comma, + Token.Literals.Numeric.Decimal("5"), + Token.Punctuation.Comma, + Token.Literals.Numeric.Decimal("7"), + Token.Punctuation.Comma, + Token.Literals.Numeric.Decimal("9"), + Token.Punctuation.CloseBrace, + Token.Keywords.Queries.Select, + Token.Variables.ReadWrite("n"), + Token.Operators.Arithmetic.Remainder, + Token.Literals.Numeric.Decimal("4"), + Token.Operators.Arithmetic.Multiplication, + Token.Literals.Numeric.Decimal("6"), + Token.Punctuation.Semicolon + ]); + }); + + it("from and group by", () => { + const input = Input.InMethod(` +var q = from c in customers + group c by c.Country into g`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Variables.Local("q"), + Token.Operators.Assignment, + Token.Keywords.Queries.From, + Token.Identifiers.RangeVariableName("c"), + 
Token.Keywords.Queries.In, + Token.Variables.ReadWrite("customers"), + Token.Keywords.Queries.Group, + Token.Variables.ReadWrite("c"), + Token.Keywords.Queries.By, + Token.Variables.Object("c"), + Token.Punctuation.Accessor, + Token.Variables.Property("Country"), + Token.Keywords.Queries.Into, + Token.Identifiers.RangeVariableName("g") + ]); + }); + + it("highlight complex query properly (issue #1106)", () => { + const input = Input.InClass(` +private static readonly Parser NodeParser = + from name in NodeName.Token() + from type in NodeValueType.Token() + from eq in Parse.Char('=') + from value in QuotedString.Token() + from lcurl in Parse.Char('{').Token() + from children in Parse.Ref(() => ChildrenNodesParser) + from rcurl in Parse.Char('}').Token() + select new Node + { + Name = name, + Type = type, + Value = value, + Children = children + }; +`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Private, + Token.Keywords.Modifiers.Static, + Token.Keywords.Modifiers.ReadOnly, + Token.Type("Parser"), + Token.Punctuation.TypeParameters.Begin, + Token.Type("Node"), + Token.Punctuation.TypeParameters.End, + Token.Identifiers.FieldName("NodeParser"), + Token.Operators.Assignment, + + // from name in NodeName.Token() + Token.Keywords.Queries.From, + Token.Identifiers.RangeVariableName("name"), + Token.Keywords.Queries.In, + Token.Variables.Object("NodeName"), + Token.Punctuation.Accessor, + Token.Identifiers.MethodName("Token"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + + // from type in NodeValueType.Token() + Token.Keywords.Queries.From, + Token.Identifiers.RangeVariableName("type"), + Token.Keywords.Queries.In, + Token.Variables.Object("NodeValueType"), + Token.Punctuation.Accessor, + Token.Identifiers.MethodName("Token"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + + // from eq in Parse.Char('=') + Token.Keywords.Queries.From, + Token.Identifiers.RangeVariableName("eq"), + 
Token.Keywords.Queries.In, + Token.Variables.Object("Parse"), + Token.Punctuation.Accessor, + Token.Identifiers.MethodName("Char"), + Token.Punctuation.OpenParen, + Token.Punctuation.Char.Begin, + Token.Literals.Char("="), + Token.Punctuation.Char.End, + Token.Punctuation.CloseParen, + + // from value in QuotedString.Token() + Token.Keywords.Queries.From, + Token.Identifiers.RangeVariableName("value"), + Token.Keywords.Queries.In, + Token.Variables.Object("QuotedString"), + Token.Punctuation.Accessor, + Token.Identifiers.MethodName("Token"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + + // from lcurl in Parse.Char('{').Token() + Token.Keywords.Queries.From, + Token.Identifiers.RangeVariableName("lcurl"), + Token.Keywords.Queries.In, + Token.Variables.Object("Parse"), + Token.Punctuation.Accessor, + Token.Identifiers.MethodName("Char"), + Token.Punctuation.OpenParen, + Token.Punctuation.Char.Begin, + Token.Literals.Char("{"), + Token.Punctuation.Char.End, + Token.Punctuation.CloseParen, + Token.Punctuation.Accessor, + Token.Identifiers.MethodName("Token"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + + // from children in Parse.Ref(() => ChildrenNodesParser) + Token.Keywords.Queries.From, + Token.Identifiers.RangeVariableName("children"), + Token.Keywords.Queries.In, + Token.Variables.Object("Parse"), + Token.Punctuation.Accessor, + Token.Identifiers.MethodName("Ref"), + Token.Punctuation.OpenParen, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Operators.Arrow, + Token.Variables.ReadWrite("ChildrenNodesParser"), + Token.Punctuation.CloseParen, + + // from rcurl in Parse.Char('}').Token() + Token.Keywords.Queries.From, + Token.Identifiers.RangeVariableName("rcurl"), + Token.Keywords.Queries.In, + Token.Variables.Object("Parse"), + Token.Punctuation.Accessor, + Token.Identifiers.MethodName("Char"), + Token.Punctuation.OpenParen, + Token.Punctuation.Char.Begin, + Token.Literals.Char("}"), + 
Token.Punctuation.Char.End, + Token.Punctuation.CloseParen, + Token.Punctuation.Accessor, + Token.Identifiers.MethodName("Token"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + + // select new Node + // { + // Name = name, + // Type = type, + // Value = value, + // Children = children + // }; + Token.Keywords.Queries.Select, + Token.Keywords.New, + Token.Type("Node"), + Token.Punctuation.OpenBrace, + Token.Variables.ReadWrite("Name"), + Token.Operators.Assignment, + Token.Variables.ReadWrite("name"), + Token.Punctuation.Comma, + Token.Variables.ReadWrite("Type"), + Token.Operators.Assignment, + Token.Variables.ReadWrite("type"), + Token.Punctuation.Comma, + Token.Variables.ReadWrite("Value"), + Token.Operators.Assignment, + Token.Variables.ReadWrite("value"), + Token.Punctuation.Comma, + Token.Variables.ReadWrite("Children"), + Token.Operators.Assignment, + Token.Variables.ReadWrite("children"), + Token.Punctuation.CloseBrace, + Token.Punctuation.Semicolon + ]); + }); }); }); }); \ No newline at end of file diff --git a/test/syntaxes/invocation-expressions.test.syntax.ts b/test/syntaxes/invocation-expressions.test.syntax.ts deleted file mode 100644 index 4e017d6031..0000000000 --- a/test/syntaxes/invocation-expressions.test.syntax.ts +++ /dev/null @@ -1,272 +0,0 @@ -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. 
- *--------------------------------------------------------------------------------------------*/ - -import { should } from 'chai'; -import { tokenize, Input, Token } from './utils/tokenize'; - -describe("Grammar", () => { - before(() => should()); - - describe("Invocation expressions", () => { - it("no arguments", () => { - const input = Input.InMethod(`M();`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Identifiers.MethodName("M"), - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon - ]); - }); - - it("one argument", () => { - const input = Input.InMethod(`M(42);`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Identifiers.MethodName("M"), - Token.Punctuation.OpenParen, - Token.Literals.Numeric.Decimal("42"), - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon - ]); - }); - - it("two arguments", () => { - const input = Input.InMethod(`M(19, 23);`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Identifiers.MethodName("M"), - Token.Punctuation.OpenParen, - Token.Literals.Numeric.Decimal("19"), - Token.Punctuation.Comma, - Token.Literals.Numeric.Decimal("23"), - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon - ]); - }); - - it("two named arguments", () => { - const input = Input.InMethod(`M(x: 19, y: 23);`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Identifiers.MethodName("M"), - Token.Punctuation.OpenParen, - Token.Variables.Parameter("x"), - Token.Punctuation.Colon, - Token.Literals.Numeric.Decimal("19"), - Token.Punctuation.Comma, - Token.Variables.Parameter("y"), - Token.Punctuation.Colon, - Token.Literals.Numeric.Decimal("23"), - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon - ]); - }); - - it("ref argument", () => { - const input = Input.InMethod(`M(ref x);`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Identifiers.MethodName("M"), - 
Token.Punctuation.OpenParen, - Token.Keywords.Modifiers.Ref, - Token.Variables.ReadWrite("x"), - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon - ]); - }); - - it("out argument", () => { - const input = Input.InMethod(`M(out x);`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Identifiers.MethodName("M"), - Token.Punctuation.OpenParen, - Token.Keywords.Modifiers.Out, - Token.Variables.ReadWrite("x"), - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon - ]); - }); - - it("generic with no arguments", () => { - const input = Input.InMethod(`M();`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Identifiers.MethodName("M"), - Token.Punctuation.TypeParameters.Begin, - Token.PrimitiveType.Int, - Token.Punctuation.TypeParameters.End, - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon - ]); - }); - - it("nested generic with no arguments", () => { - const input = Input.InMethod(`M>();`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Identifiers.MethodName("M"), - Token.Punctuation.TypeParameters.Begin, - Token.Type("T"), - Token.Punctuation.TypeParameters.Begin, - Token.PrimitiveType.Int, - Token.Punctuation.TypeParameters.End, - Token.Punctuation.TypeParameters.End, - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon - ]); - }); - - it("double-nested generic with no arguments", () => { - const input = Input.InMethod(`M>>();`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Identifiers.MethodName("M"), - Token.Punctuation.TypeParameters.Begin, - Token.Type("T"), - Token.Punctuation.TypeParameters.Begin, - Token.Type("U"), - Token.Punctuation.TypeParameters.Begin, - Token.PrimitiveType.Int, - Token.Punctuation.TypeParameters.End, - Token.Punctuation.TypeParameters.End, - Token.Punctuation.TypeParameters.End, - Token.Punctuation.OpenParen, - 
Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon - ]); - }); - - it("member of generic with no arguments", () => { - const input = Input.InMethod(`C.M();`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Variables.Object("C"), - Token.Punctuation.TypeParameters.Begin, - Token.PrimitiveType.Int, - Token.Punctuation.TypeParameters.End, - Token.Punctuation.Accessor, - Token.Identifiers.MethodName("M"), - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon - ]); - }); - - it("member of qualified generic with no arguments", () => { - const input = Input.InMethod(`N.C.M();`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Variables.Object("N"), - Token.Punctuation.Accessor, - Token.Variables.Object("C"), - Token.Punctuation.TypeParameters.Begin, - Token.PrimitiveType.Int, - Token.Punctuation.TypeParameters.End, - Token.Punctuation.Accessor, - Token.Identifiers.MethodName("M"), - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon - ]); - }); - - it("store result of member of qualified generic with no arguments", () => { - const input = Input.InMethod(`var o = N.C.M();`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Var, - Token.Variables.Local("o"), - Token.Operators.Assignment, - Token.Variables.Object("N"), - Token.Punctuation.Accessor, - Token.Variables.Object("C"), - Token.Punctuation.TypeParameters.Begin, - Token.PrimitiveType.Int, - Token.Punctuation.TypeParameters.End, - Token.Punctuation.Accessor, - Token.Identifiers.MethodName("M"), - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon - ]); - }); - - it("store result of qualified method with no arguments", () => { - const input = Input.InMethod(`var o = N.C.M();`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Var, - Token.Variables.Local("o"), - 
Token.Operators.Assignment, - Token.Variables.Object("N"), - Token.Punctuation.Accessor, - Token.Variables.Property("C"), - Token.Punctuation.Accessor, - Token.Identifiers.MethodName("M"), - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon - ]); - }); - - it("store result of this.qualified method with no arguments", () => { - const input = Input.InMethod(`var o = this.C.M();`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Var, - Token.Variables.Local("o"), - Token.Operators.Assignment, - Token.Keywords.This, - Token.Punctuation.Accessor, - Token.Variables.Property("C"), - Token.Punctuation.Accessor, - Token.Identifiers.MethodName("M"), - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon - ]); - }); - - it("store result of invocation with two named arguments", () => { - const input = Input.InMethod(`var o = M(x: 19, y: 23);`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Var, - Token.Variables.Local("o"), - Token.Operators.Assignment, - Token.Identifiers.MethodName("M"), - Token.Punctuation.OpenParen, - Token.Variables.Parameter("x"), - Token.Punctuation.Colon, - Token.Literals.Numeric.Decimal("19"), - Token.Punctuation.Comma, - Token.Variables.Parameter("y"), - Token.Punctuation.Colon, - Token.Literals.Numeric.Decimal("23"), - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon - ]); - }); - }); -}); \ No newline at end of file diff --git a/test/syntaxes/literals.test.syntax.ts b/test/syntaxes/literals.test.syntax.ts new file mode 100644 index 0000000000..8d88a74123 --- /dev/null +++ b/test/syntaxes/literals.test.syntax.ts @@ -0,0 +1,363 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. 
+ *--------------------------------------------------------------------------------------------*/ + +import { should } from 'chai'; +import { tokenize, Input, Token } from './utils/tokenize'; + +describe("Grammar", () => { + before(() => should()); + + describe("Literals", () => { + describe("Booleans", () => { + it("true", () => { + const input = Input.InClass(`bool x = true;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.PrimitiveType.Bool, + Token.Identifiers.FieldName("x"), + Token.Operators.Assignment, + Token.Literals.Boolean.True, + Token.Punctuation.Semicolon]); + }); + + it("false", () => { + const input = Input.InClass(`bool x = false;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.PrimitiveType.Bool, + Token.Identifiers.FieldName("x"), + Token.Operators.Assignment, + Token.Literals.Boolean.False, + Token.Punctuation.Semicolon]); + }); + }); + + describe("Chars", () => { + it("empty", () => { + const input = Input.InMethod(`var x = '';`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Variables.Local("x"), + Token.Operators.Assignment, + Token.Punctuation.Char.Begin, + Token.Punctuation.Char.End, + Token.Punctuation.Semicolon]); + }); + + it("letter", () => { + const input = Input.InMethod(`var x = 'a';`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Variables.Local("x"), + Token.Operators.Assignment, + Token.Punctuation.Char.Begin, + Token.Literals.Char("a"), + Token.Punctuation.Char.End, + Token.Punctuation.Semicolon]); + }); + + it("escaped single quote", () => { + const input = Input.InMethod(`var x = '\\'';`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Variables.Local("x"), + Token.Operators.Assignment, + Token.Punctuation.Char.Begin, + Token.Literals.CharacterEscape("\\'"), + Token.Punctuation.Char.End, + Token.Punctuation.Semicolon]); + }); + 
}); + + describe("Numbers", () => { + it("decimal zero", () => { + const input = Input.InClass(`int x = 0;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.PrimitiveType.Int, + Token.Identifiers.FieldName("x"), + Token.Operators.Assignment, + Token.Literals.Numeric.Decimal("0"), + Token.Punctuation.Semicolon]); + }); + + it("hexadecimal zero", () => { + const input = Input.InClass(`int x = 0x0;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.PrimitiveType.Int, + Token.Identifiers.FieldName("x"), + Token.Operators.Assignment, + Token.Literals.Numeric.Hexadecimal("0x0"), + Token.Punctuation.Semicolon]); + }); + + it("binary zero", () => { + const input = Input.InClass(`int x = 0b0;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.PrimitiveType.Int, + Token.Identifiers.FieldName("x"), + Token.Operators.Assignment, + Token.Literals.Numeric.Binary("0b0"), + Token.Punctuation.Semicolon]); + }); + + it("floating-point zero", () => { + const input = Input.InClass(`float x = 0.0;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.PrimitiveType.Float, + Token.Identifiers.FieldName("x"), + Token.Operators.Assignment, + Token.Literals.Numeric.Decimal("0.0"), + Token.Punctuation.Semicolon]); + }); + }); + + describe("Strings", () => { + it("simple", () => { + const input = Input.InClass(`string test = "hello world!";`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.PrimitiveType.String, + Token.Identifiers.FieldName("test"), + Token.Operators.Assignment, + Token.Punctuation.String.Begin, + Token.Literals.String("hello world!"), + Token.Punctuation.String.End, + Token.Punctuation.Semicolon]); + }); + + it("escaped double-quote", () => { + const input = Input.InClass(`string test = "hello \\"world!\\"";`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.PrimitiveType.String, + Token.Identifiers.FieldName("test"), + 
Token.Operators.Assignment, + Token.Punctuation.String.Begin, + Token.Literals.String("hello "), + Token.Literals.CharacterEscape("\\\""), + Token.Literals.String("world!"), + Token.Literals.CharacterEscape("\\\""), + Token.Punctuation.String.End, + Token.Punctuation.Semicolon]); + }); + + it("line break before close quote", () => { + const input = Input.InClass(` +string test = "hello +world!";`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.PrimitiveType.String, + Token.Identifiers.FieldName("test"), + Token.Operators.Assignment, + Token.Punctuation.String.Begin, + Token.Literals.String("hello"), + + // Note: Because the string ended prematurely, the rest of this line and the contents of the next are junk. + Token.IllegalNewLine(" "), + Token.Variables.ReadWrite("world"), + Token.Operators.Logical.Not, + Token.Punctuation.String.Begin, + Token.IllegalNewLine(";")]); + }); + + it("simple (verbatim)", () => { + const input = Input.InClass(`string test = @"hello world!";`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.PrimitiveType.String, + Token.Identifiers.FieldName("test"), + Token.Operators.Assignment, + Token.Punctuation.String.VerbatimBegin, + Token.Literals.String("hello world!"), + Token.Punctuation.String.End, + Token.Punctuation.Semicolon]); + }); + + it("escaped double-quote (verbatim)", () => { + const input = Input.InClass("string test = @\"hello \"\"world!\"\"\";"); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.PrimitiveType.String, + Token.Identifiers.FieldName("test"), + Token.Operators.Assignment, + Token.Punctuation.String.VerbatimBegin, + Token.Literals.String("hello "), + Token.Literals.CharacterEscape("\"\""), + Token.Literals.String("world!"), + Token.Literals.CharacterEscape("\"\""), + Token.Punctuation.String.End, + Token.Punctuation.Semicolon]); + }); + + it("line break before close quote (verbatim)", () => { + const input = Input.InClass(` +string test = 
@"hello +world!";`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.PrimitiveType.String, + Token.Identifiers.FieldName("test"), + Token.Operators.Assignment, + Token.Punctuation.String.VerbatimBegin, + Token.Literals.String("hello "), + Token.Literals.String("world!"), + Token.Punctuation.String.End, + Token.Punctuation.Semicolon]); + }); + + it("highlight escaped double-quote properly (issue #1078 - repro 1)", () => { + const input = Input.InMethod(` +configContent = rgx.Replace(configContent, $"name{suffix}\\""); +File.WriteAllText(_testConfigFile, configContent); +`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Variables.ReadWrite("configContent"), + Token.Operators.Assignment, + Token.Variables.Object('rgx'), + Token.Punctuation.Accessor, + Token.Identifiers.MethodName("Replace"), + Token.Punctuation.OpenParen, + Token.Variables.ReadWrite("configContent"), + Token.Punctuation.Comma, + Token.Punctuation.InterpolatedString.Begin, + Token.Literals.String("name"), + Token.Punctuation.Interpolation.Begin, + Token.Variables.ReadWrite("suffix"), + Token.Punctuation.Interpolation.End, + Token.Literals.CharacterEscape("\\\""), + Token.Punctuation.String.End, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + Token.Variables.Object("File"), + Token.Punctuation.Accessor, + Token.Identifiers.MethodName("WriteAllText"), + Token.Punctuation.OpenParen, + Token.Variables.ReadWrite("_testConfigFile"), + Token.Punctuation.Comma, + Token.Variables.ReadWrite("configContent"), + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("highlight escaped double-quote properly (issue #1078 - repro 2)", () => { + const input = Input.InMethod(` +throw new InvalidCastException( + $"The value \\"{this.Value} is of the type \\"{this.Type}\\". 
You asked for \\"{typeof(T)}\\"."); +`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Control.Throw, + Token.Keywords.New, + Token.Type("InvalidCastException"), + Token.Punctuation.OpenParen, + Token.Punctuation.InterpolatedString.Begin, + Token.Literals.String("The value "), + Token.Literals.CharacterEscape("\\\""), + Token.Punctuation.Interpolation.Begin, + Token.Keywords.This, + Token.Punctuation.Accessor, + Token.Variables.Property("Value"), + Token.Punctuation.Interpolation.End, + Token.Literals.String(" is of the type "), + Token.Literals.CharacterEscape("\\\""), + Token.Punctuation.Interpolation.Begin, + Token.Keywords.This, + Token.Punctuation.Accessor, + Token.Variables.Property("Type"), + Token.Punctuation.Interpolation.End, + Token.Literals.CharacterEscape("\\\""), + Token.Literals.String(". You asked for "), + Token.Literals.CharacterEscape("\\\""), + Token.Punctuation.Interpolation.Begin, + Token.Keywords.TypeOf, + Token.Punctuation.OpenParen, + Token.Type("T"), + Token.Punctuation.CloseParen, + Token.Punctuation.Interpolation.End, + Token.Literals.CharacterEscape("\\\""), + Token.Literals.String("."), + Token.Punctuation.InterpolatedString.End, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("highlight strings containing braces correctly (issue #746)", () => { + const input = ` +namespace X +{ + class Y + { + public MethodZ() + { + this.Writer.WriteLine("class CInput{0}Register : public {1}", index, baseClass); + } + } +} +`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Namespace, + Token.Identifiers.NamespaceName("X"), + Token.Punctuation.OpenBrace, + Token.Keywords.Class, + Token.Identifiers.ClassName("Y"), + Token.Punctuation.OpenBrace, + Token.Keywords.Modifiers.Public, + Token.Identifiers.MethodName("MethodZ"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Keywords.This, + 
Token.Punctuation.Accessor, + Token.Variables.Property("Writer"), + Token.Punctuation.Accessor, + Token.Identifiers.MethodName("WriteLine"), + Token.Punctuation.OpenParen, + Token.Punctuation.String.Begin, + Token.Literals.String("class CInput{0}Register : public {1}"), + Token.Punctuation.String.End, + Token.Punctuation.Comma, + Token.Variables.ReadWrite("index"), + Token.Punctuation.Comma, + Token.Variables.ReadWrite("baseClass"), + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseBrace + ]); + }); + }); + }); +}); \ No newline at end of file diff --git a/test/syntaxes/numeric-literals.test.syntax.ts b/test/syntaxes/numeric-literals.test.syntax.ts deleted file mode 100644 index 0c5d736aae..0000000000 --- a/test/syntaxes/numeric-literals.test.syntax.ts +++ /dev/null @@ -1,65 +0,0 @@ -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. 
- *--------------------------------------------------------------------------------------------*/ - -import { should } from 'chai'; -import { tokenize, Input, Token } from './utils/tokenize'; - -describe("Grammar", () => { - before(() => should()); - - describe("Literals - numeric", () => { - it("decimal zero", () => { - - const input = Input.InClass(`int x = 0;`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.PrimitiveType.Int, - Token.Identifiers.FieldName("x"), - Token.Operators.Assignment, - Token.Literals.Numeric.Decimal("0"), - Token.Punctuation.Semicolon]); - }); - - it("hexadecimal zero", () => { - - const input = Input.InClass(`int x = 0x0;`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.PrimitiveType.Int, - Token.Identifiers.FieldName("x"), - Token.Operators.Assignment, - Token.Literals.Numeric.Hexadecimal("0x0"), - Token.Punctuation.Semicolon]); - }); - - it("binary zero", () => { - - const input = Input.InClass(`int x = 0b0;`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.PrimitiveType.Int, - Token.Identifiers.FieldName("x"), - Token.Operators.Assignment, - Token.Literals.Numeric.Binary("0b0"), - Token.Punctuation.Semicolon]); - }); - - it("floating-point zero", () => { - - const input = Input.InClass(`float x = 0.0;`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.PrimitiveType.Float, - Token.Identifiers.FieldName("x"), - Token.Operators.Assignment, - Token.Literals.Numeric.Decimal("0.0"), - Token.Punctuation.Semicolon]); - }); - }); -}); \ No newline at end of file diff --git a/test/syntaxes/queries.test.syntax.ts b/test/syntaxes/queries.test.syntax.ts deleted file mode 100644 index 79f017a937..0000000000 --- a/test/syntaxes/queries.test.syntax.ts +++ /dev/null @@ -1,449 +0,0 @@ -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. 
- * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ - -import { should } from 'chai'; -import { tokenize, Input, Token } from './utils/tokenize'; - -describe("Grammar", () => { - before(() => should()); - - describe("Query expressions", () => { - it("from clause", () => { - const input = Input.InMethod(`var q = from n in numbers`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Var, - Token.Variables.Local("q"), - Token.Operators.Assignment, - Token.Keywords.Queries.From, - Token.Identifiers.RangeVariableName("n"), - Token.Keywords.Queries.In, - Token.Variables.ReadWrite("numbers") - ]); - }); - - it("from clause with type", () => { - const input = Input.InMethod(`var q = from int n in numbers`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Var, - Token.Variables.Local("q"), - Token.Operators.Assignment, - Token.Keywords.Queries.From, - Token.PrimitiveType.Int, - Token.Identifiers.RangeVariableName("n"), - Token.Keywords.Queries.In, - Token.Variables.ReadWrite("numbers") - ]); - }); - - it("from clause followed by from clause", () => { - const input = Input.InMethod(` -var q = from x in list1 - from y in list2 -`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Var, - Token.Variables.Local("q"), - Token.Operators.Assignment, - Token.Keywords.Queries.From, - Token.Identifiers.RangeVariableName("x"), - Token.Keywords.Queries.In, - Token.Variables.ReadWrite("list1"), - Token.Keywords.Queries.From, - Token.Identifiers.RangeVariableName("y"), - Token.Keywords.Queries.In, - Token.Variables.ReadWrite("list2") - ]); - }); - - it("from clause, join clause", () => { - const input = Input.InMethod(` -var q = from c in customers - join o in orders on c.CustomerID equals o.CustomerID -`); - const tokens = tokenize(input); - - 
tokens.should.deep.equal([ - Token.Keywords.Var, - Token.Variables.Local("q"), - Token.Operators.Assignment, - Token.Keywords.Queries.From, - Token.Identifiers.RangeVariableName("c"), - Token.Keywords.Queries.In, - Token.Variables.ReadWrite("customers"), - Token.Keywords.Queries.Join, - Token.Identifiers.RangeVariableName("o"), - Token.Keywords.Queries.In, - Token.Variables.ReadWrite("orders"), - Token.Keywords.Queries.On, - Token.Variables.Object("c"), - Token.Punctuation.Accessor, - Token.Variables.Property("CustomerID"), - Token.Keywords.Queries.Equals, - Token.Variables.Object("o"), - Token.Punctuation.Accessor, - Token.Variables.Property("CustomerID") - ]); - }); - - it("from clause, join-into clause", () => { - const input = Input.InMethod(` -var q = from c in customers - join o in orders on c.CustomerID equals o.CustomerID into co -`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Var, - Token.Variables.Local("q"), - Token.Operators.Assignment, - Token.Keywords.Queries.From, - Token.Identifiers.RangeVariableName("c"), - Token.Keywords.Queries.In, - Token.Variables.ReadWrite("customers"), - Token.Keywords.Queries.Join, - Token.Identifiers.RangeVariableName("o"), - Token.Keywords.Queries.In, - Token.Variables.ReadWrite("orders"), - Token.Keywords.Queries.On, - Token.Variables.Object("c"), - Token.Punctuation.Accessor, - Token.Variables.Property("CustomerID"), - Token.Keywords.Queries.Equals, - Token.Variables.Object("o"), - Token.Punctuation.Accessor, - Token.Variables.Property("CustomerID"), - Token.Keywords.Queries.Into, - Token.Identifiers.RangeVariableName("co") - ]); - }); - - it("from clause, orderby", () => { - const input = Input.InMethod(` -var q = from o in orders - orderby o.Customer.Name, o.Total -`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Var, - Token.Variables.Local("q"), - Token.Operators.Assignment, - Token.Keywords.Queries.From, - 
Token.Identifiers.RangeVariableName("o"), - Token.Keywords.Queries.In, - Token.Variables.ReadWrite("orders"), - Token.Keywords.Queries.OrderBy, - Token.Variables.Object("o"), - Token.Punctuation.Accessor, - Token.Variables.Property("Customer"), - Token.Punctuation.Accessor, - Token.Variables.Property("Name"), - Token.Punctuation.Comma, - Token.Variables.Object("o"), - Token.Punctuation.Accessor, - Token.Variables.Property("Total") - ]); - }); - - it("from clause, orderby ascending", () => { - const input = Input.InMethod(` -var q = from o in orders - orderby o.Customer.Name ascending, o.Total -`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Var, - Token.Variables.Local("q"), - Token.Operators.Assignment, - Token.Keywords.Queries.From, - Token.Identifiers.RangeVariableName("o"), - Token.Keywords.Queries.In, - Token.Variables.ReadWrite("orders"), - Token.Keywords.Queries.OrderBy, - Token.Variables.Object("o"), - Token.Punctuation.Accessor, - Token.Variables.Property("Customer"), - Token.Punctuation.Accessor, - Token.Variables.Property("Name"), - Token.Keywords.Queries.Ascending, - Token.Punctuation.Comma, - Token.Variables.Object("o"), - Token.Punctuation.Accessor, - Token.Variables.Property("Total") - ]); - }); - - it("from clause, orderby descending", () => { - const input = Input.InMethod(` -var q = from o in orders - orderby o.Customer.Name, o.Total descending -`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Var, - Token.Variables.Local("q"), - Token.Operators.Assignment, - Token.Keywords.Queries.From, - Token.Identifiers.RangeVariableName("o"), - Token.Keywords.Queries.In, - Token.Variables.ReadWrite("orders"), - Token.Keywords.Queries.OrderBy, - Token.Variables.Object("o"), - Token.Punctuation.Accessor, - Token.Variables.Property("Customer"), - Token.Punctuation.Accessor, - Token.Variables.Property("Name"), - Token.Punctuation.Comma, - Token.Variables.Object("o"), - 
Token.Punctuation.Accessor, - Token.Variables.Property("Total"), - Token.Keywords.Queries.Descending - ]); - }); - - it("from and select", () => { - const input = Input.InMethod(` -var q = from n in numbers - select n;`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Var, - Token.Variables.Local("q"), - Token.Operators.Assignment, - Token.Keywords.Queries.From, - Token.Identifiers.RangeVariableName("n"), - Token.Keywords.Queries.In, - Token.Variables.ReadWrite("numbers"), - Token.Keywords.Queries.Select, - Token.Variables.ReadWrite("n"), - Token.Punctuation.Semicolon - ]); - }); - - it("from and select with complex expressions", () => { - const input = Input.InMethod(` -var q = from n in new[] { 1, 3, 5, 7, 9 } - select n % 4 * 6;`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Var, - Token.Variables.Local("q"), - Token.Operators.Assignment, - Token.Keywords.Queries.From, - Token.Identifiers.RangeVariableName("n"), - Token.Keywords.Queries.In, - Token.Keywords.New, - Token.Punctuation.OpenBracket, - Token.Punctuation.CloseBracket, - Token.Punctuation.OpenBrace, - Token.Literals.Numeric.Decimal("1"), - Token.Punctuation.Comma, - Token.Literals.Numeric.Decimal("3"), - Token.Punctuation.Comma, - Token.Literals.Numeric.Decimal("5"), - Token.Punctuation.Comma, - Token.Literals.Numeric.Decimal("7"), - Token.Punctuation.Comma, - Token.Literals.Numeric.Decimal("9"), - Token.Punctuation.CloseBrace, - Token.Keywords.Queries.Select, - Token.Variables.ReadWrite("n"), - Token.Operators.Arithmetic.Remainder, - Token.Literals.Numeric.Decimal("4"), - Token.Operators.Arithmetic.Multiplication, - Token.Literals.Numeric.Decimal("6"), - Token.Punctuation.Semicolon - ]); - }); - - it("from and group by", () => { - const input = Input.InMethod(` -var q = from c in customers - group c by c.Country into g`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Var, - 
Token.Variables.Local("q"), - Token.Operators.Assignment, - Token.Keywords.Queries.From, - Token.Identifiers.RangeVariableName("c"), - Token.Keywords.Queries.In, - Token.Variables.ReadWrite("customers"), - Token.Keywords.Queries.Group, - Token.Variables.ReadWrite("c"), - Token.Keywords.Queries.By, - Token.Variables.Object("c"), - Token.Punctuation.Accessor, - Token.Variables.Property("Country"), - Token.Keywords.Queries.Into, - Token.Identifiers.RangeVariableName("g") - ]); - }); - - it("highlight complex query properly (issue #1106)", () => { - const input = Input.InClass(` -private static readonly Parser NodeParser = - from name in NodeName.Token() - from type in NodeValueType.Token() - from eq in Parse.Char('=') - from value in QuotedString.Token() - from lcurl in Parse.Char('{').Token() - from children in Parse.Ref(() => ChildrenNodesParser) - from rcurl in Parse.Char('}').Token() - select new Node - { - Name = name, - Type = type, - Value = value, - Children = children - }; -`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Modifiers.Private, - Token.Keywords.Modifiers.Static, - Token.Keywords.Modifiers.ReadOnly, - Token.Type("Parser"), - Token.Punctuation.TypeParameters.Begin, - Token.Type("Node"), - Token.Punctuation.TypeParameters.End, - Token.Identifiers.FieldName("NodeParser"), - Token.Operators.Assignment, - - // from name in NodeName.Token() - Token.Keywords.Queries.From, - Token.Identifiers.RangeVariableName("name"), - Token.Keywords.Queries.In, - Token.Variables.Object("NodeName"), - Token.Punctuation.Accessor, - Token.Identifiers.MethodName("Token"), - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - - // from type in NodeValueType.Token() - Token.Keywords.Queries.From, - Token.Identifiers.RangeVariableName("type"), - Token.Keywords.Queries.In, - Token.Variables.Object("NodeValueType"), - Token.Punctuation.Accessor, - Token.Identifiers.MethodName("Token"), - Token.Punctuation.OpenParen, - 
Token.Punctuation.CloseParen, - - // from eq in Parse.Char('=') - Token.Keywords.Queries.From, - Token.Identifiers.RangeVariableName("eq"), - Token.Keywords.Queries.In, - Token.Variables.Object("Parse"), - Token.Punctuation.Accessor, - Token.Identifiers.MethodName("Char"), - Token.Punctuation.OpenParen, - Token.Punctuation.Char.Begin, - Token.Literals.Char("="), - Token.Punctuation.Char.End, - Token.Punctuation.CloseParen, - - // from value in QuotedString.Token() - Token.Keywords.Queries.From, - Token.Identifiers.RangeVariableName("value"), - Token.Keywords.Queries.In, - Token.Variables.Object("QuotedString"), - Token.Punctuation.Accessor, - Token.Identifiers.MethodName("Token"), - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - - // from lcurl in Parse.Char('{').Token() - Token.Keywords.Queries.From, - Token.Identifiers.RangeVariableName("lcurl"), - Token.Keywords.Queries.In, - Token.Variables.Object("Parse"), - Token.Punctuation.Accessor, - Token.Identifiers.MethodName("Char"), - Token.Punctuation.OpenParen, - Token.Punctuation.Char.Begin, - Token.Literals.Char("{"), - Token.Punctuation.Char.End, - Token.Punctuation.CloseParen, - Token.Punctuation.Accessor, - Token.Identifiers.MethodName("Token"), - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - - // from children in Parse.Ref(() => ChildrenNodesParser) - Token.Keywords.Queries.From, - Token.Identifiers.RangeVariableName("children"), - Token.Keywords.Queries.In, - Token.Variables.Object("Parse"), - Token.Punctuation.Accessor, - Token.Identifiers.MethodName("Ref"), - Token.Punctuation.OpenParen, - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - Token.Operators.Arrow, - Token.Variables.ReadWrite("ChildrenNodesParser"), - Token.Punctuation.CloseParen, - - // from rcurl in Parse.Char('}').Token() - Token.Keywords.Queries.From, - Token.Identifiers.RangeVariableName("rcurl"), - Token.Keywords.Queries.In, - Token.Variables.Object("Parse"), - Token.Punctuation.Accessor, - 
Token.Identifiers.MethodName("Char"), - Token.Punctuation.OpenParen, - Token.Punctuation.Char.Begin, - Token.Literals.Char("}"), - Token.Punctuation.Char.End, - Token.Punctuation.CloseParen, - Token.Punctuation.Accessor, - Token.Identifiers.MethodName("Token"), - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - - // select new Node - // { - // Name = name, - // Type = type, - // Value = value, - // Children = children - // }; - Token.Keywords.Queries.Select, - Token.Keywords.New, - Token.Type("Node"), - Token.Punctuation.OpenBrace, - Token.Variables.ReadWrite("Name"), - Token.Operators.Assignment, - Token.Variables.ReadWrite("name"), - Token.Punctuation.Comma, - Token.Variables.ReadWrite("Type"), - Token.Operators.Assignment, - Token.Variables.ReadWrite("type"), - Token.Punctuation.Comma, - Token.Variables.ReadWrite("Value"), - Token.Operators.Assignment, - Token.Variables.ReadWrite("value"), - Token.Punctuation.Comma, - Token.Variables.ReadWrite("Children"), - Token.Operators.Assignment, - Token.Variables.ReadWrite("children"), - Token.Punctuation.CloseBrace, - Token.Punctuation.Semicolon - ]); - }); - }); -}); \ No newline at end of file diff --git a/test/syntaxes/string-literals.test.syntax.ts b/test/syntaxes/string-literals.test.syntax.ts deleted file mode 100644 index 3b3b73bad1..0000000000 --- a/test/syntaxes/string-literals.test.syntax.ts +++ /dev/null @@ -1,251 +0,0 @@ -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. 
- *--------------------------------------------------------------------------------------------*/ - -import { should } from 'chai'; -import { tokenize, Input, Token } from './utils/tokenize'; - -describe("Grammar", () => { - before(() => should()); - - describe("Literals - string", () => { - it("simple", () => { - - const input = Input.InClass(`string test = "hello world!";`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.PrimitiveType.String, - Token.Identifiers.FieldName("test"), - Token.Operators.Assignment, - Token.Punctuation.String.Begin, - Token.Literals.String("hello world!"), - Token.Punctuation.String.End, - Token.Punctuation.Semicolon]); - }); - - it("escaped double-quote", () => { - - const input = Input.InClass(`string test = "hello \\"world!\\"";`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.PrimitiveType.String, - Token.Identifiers.FieldName("test"), - Token.Operators.Assignment, - Token.Punctuation.String.Begin, - Token.Literals.String("hello "), - Token.Literals.CharacterEscape("\\\""), - Token.Literals.String("world!"), - Token.Literals.CharacterEscape("\\\""), - Token.Punctuation.String.End, - Token.Punctuation.Semicolon]); - }); - - it("line break before close quote", () => { - - const input = Input.InClass(` -string test = "hello -world!";`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.PrimitiveType.String, - Token.Identifiers.FieldName("test"), - Token.Operators.Assignment, - Token.Punctuation.String.Begin, - Token.Literals.String("hello"), - - // Note: Because the string ended prematurely, the rest of this line and the contents of the next are junk. 
- Token.IllegalNewLine(" "), - Token.Variables.ReadWrite("world"), - Token.Operators.Logical.Not, - Token.Punctuation.String.Begin, - Token.IllegalNewLine(";")]); - }); - - it("simple (verbatim)", () => { - - const input = Input.InClass(`string test = @"hello world!";`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.PrimitiveType.String, - Token.Identifiers.FieldName("test"), - Token.Operators.Assignment, - Token.Punctuation.String.VerbatimBegin, - Token.Literals.String("hello world!"), - Token.Punctuation.String.End, - Token.Punctuation.Semicolon]); - }); - - it("escaped double-quote (verbatim)", () => { - - const input = Input.InClass("string test = @\"hello \"\"world!\"\"\";"); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.PrimitiveType.String, - Token.Identifiers.FieldName("test"), - Token.Operators.Assignment, - Token.Punctuation.String.VerbatimBegin, - Token.Literals.String("hello "), - Token.Literals.CharacterEscape("\"\""), - Token.Literals.String("world!"), - Token.Literals.CharacterEscape("\"\""), - Token.Punctuation.String.End, - Token.Punctuation.Semicolon]); - }); - - it("line break before close quote (verbatim)", () => { - - const input = Input.InClass(` -string test = @"hello -world!";`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.PrimitiveType.String, - Token.Identifiers.FieldName("test"), - Token.Operators.Assignment, - Token.Punctuation.String.VerbatimBegin, - Token.Literals.String("hello "), - Token.Literals.String("world!"), - Token.Punctuation.String.End, - Token.Punctuation.Semicolon]); - }); - - it("highlight escaped double-quote properly (issue #1078 - repro 1)", () => { - - const input = Input.InMethod(` -configContent = rgx.Replace(configContent, $"name{suffix}\\""); -File.WriteAllText(_testConfigFile, configContent); -`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Variables.ReadWrite("configContent"), - 
Token.Operators.Assignment, - Token.Variables.Object('rgx'), - Token.Punctuation.Accessor, - Token.Identifiers.MethodName("Replace"), - Token.Punctuation.OpenParen, - Token.Variables.ReadWrite("configContent"), - Token.Punctuation.Comma, - Token.Punctuation.InterpolatedString.Begin, - Token.Literals.String("name"), - Token.Punctuation.Interpolation.Begin, - Token.Variables.ReadWrite("suffix"), - Token.Punctuation.Interpolation.End, - Token.Literals.CharacterEscape("\\\""), - Token.Punctuation.String.End, - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon, - Token.Variables.Object("File"), - Token.Punctuation.Accessor, - Token.Identifiers.MethodName("WriteAllText"), - Token.Punctuation.OpenParen, - Token.Variables.ReadWrite("_testConfigFile"), - Token.Punctuation.Comma, - Token.Variables.ReadWrite("configContent"), - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon - ]); - }); - - it("highlight escaped double-quote properly (issue #1078 - repro 2)", () => { - - const input = Input.InMethod(` -throw new InvalidCastException( - $"The value \\"{this.Value} is of the type \\"{this.Type}\\". You asked for \\"{typeof(T)}\\"."); -`); - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Control.Throw, - Token.Keywords.New, - Token.Type("InvalidCastException"), - Token.Punctuation.OpenParen, - Token.Punctuation.InterpolatedString.Begin, - Token.Literals.String("The value "), - Token.Literals.CharacterEscape("\\\""), - Token.Punctuation.Interpolation.Begin, - Token.Keywords.This, - Token.Punctuation.Accessor, - Token.Variables.Property("Value"), - Token.Punctuation.Interpolation.End, - Token.Literals.String(" is of the type "), - Token.Literals.CharacterEscape("\\\""), - Token.Punctuation.Interpolation.Begin, - Token.Keywords.This, - Token.Punctuation.Accessor, - Token.Variables.Property("Type"), - Token.Punctuation.Interpolation.End, - Token.Literals.CharacterEscape("\\\""), - Token.Literals.String(". 
You asked for "), - Token.Literals.CharacterEscape("\\\""), - Token.Punctuation.Interpolation.Begin, - Token.Keywords.TypeOf, - Token.Punctuation.OpenParen, - Token.Type("T"), - Token.Punctuation.CloseParen, - Token.Punctuation.Interpolation.End, - Token.Literals.CharacterEscape("\\\""), - Token.Literals.String("."), - Token.Punctuation.InterpolatedString.End, - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon - ]); - }); - - it("highlight strings containing braces correctly (issue #746)", () => { - - const input = ` -namespace X -{ - class Y - { - public MethodZ() - { - this.Writer.WriteLine("class CInput{0}Register : public {1}", index, baseClass); - } - } -} -`; - const tokens = tokenize(input); - - tokens.should.deep.equal([ - Token.Keywords.Namespace, - Token.Identifiers.NamespaceName("X"), - Token.Punctuation.OpenBrace, - Token.Keywords.Class, - Token.Identifiers.ClassName("Y"), - Token.Punctuation.OpenBrace, - Token.Keywords.Modifiers.Public, - Token.Identifiers.MethodName("MethodZ"), - Token.Punctuation.OpenParen, - Token.Punctuation.CloseParen, - Token.Punctuation.OpenBrace, - Token.Keywords.This, - Token.Punctuation.Accessor, - Token.Variables.Property("Writer"), - Token.Punctuation.Accessor, - Token.Identifiers.MethodName("WriteLine"), - Token.Punctuation.OpenParen, - Token.Punctuation.String.Begin, - Token.Literals.String("class CInput{0}Register : public {1}"), - Token.Punctuation.String.End, - Token.Punctuation.Comma, - Token.Variables.ReadWrite("index"), - Token.Punctuation.Comma, - Token.Variables.ReadWrite("baseClass"), - Token.Punctuation.CloseParen, - Token.Punctuation.Semicolon, - Token.Punctuation.CloseBrace, - Token.Punctuation.CloseBrace, - Token.Punctuation.CloseBrace - ]); - }); - }); -}); \ No newline at end of file From e01afdba2e7a66374ae548b255b82e2472036fc6 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Mon, 9 Jan 2017 15:48:57 -0800 Subject: [PATCH 119/192] Clean up several token types --- syntaxes/build/index.d.ts | 
3 - syntaxes/build/package.json | 18 ---- syntaxes/build/tsconfig.json | 10 -- syntaxes/csharp.tmLanguage.yml | 26 ++--- test/syntaxes/constructors.test.syntax.ts | 28 ++--- test/syntaxes/delegates.test.syntax.ts | 8 +- test/syntaxes/enums.test.syntax.ts | 26 ++--- test/syntaxes/expressions.test.syntax.ts | 120 +++++++++++----------- test/syntaxes/fields.test.syntax.ts | 8 +- test/syntaxes/indexers.test.syntax.ts | 10 +- test/syntaxes/literals.test.syntax.ts | 6 +- test/syntaxes/locals.test.syntax.ts | 18 ++-- test/syntaxes/methods.test.syntax.ts | 36 +++---- test/syntaxes/operators.test.syntax.ts | 86 ++++++++-------- test/syntaxes/preprocessor.test.syntax.ts | 8 +- test/syntaxes/statements.test.syntax.ts | 18 ++-- test/syntaxes/utils/tokenize.ts | 8 +- 17 files changed, 203 insertions(+), 234 deletions(-) delete mode 100644 syntaxes/build/index.d.ts delete mode 100644 syntaxes/build/package.json delete mode 100644 syntaxes/build/tsconfig.json diff --git a/syntaxes/build/index.d.ts b/syntaxes/build/index.d.ts deleted file mode 100644 index 8f29ae685e..0000000000 --- a/syntaxes/build/index.d.ts +++ /dev/null @@ -1,3 +0,0 @@ -declare module "plist" { - export function build(json: any): string; -} \ No newline at end of file diff --git a/syntaxes/build/package.json b/syntaxes/build/package.json deleted file mode 100644 index eae417dc3b..0000000000 --- a/syntaxes/build/package.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "name": "build", - "version": "1.0.0", - "private": true, - "description": "", - "main": "build.js", - "scripts": { - "postinstall": "npm start", - "start": "tsc && node ./build.js" - }, - "dependencies": { - "@types/js-yaml": "latest", - "@types/node": "latest", - "js-yaml": "latest", - "plist": "latest", - "typescript": "latest" - } -} \ No newline at end of file diff --git a/syntaxes/build/tsconfig.json b/syntaxes/build/tsconfig.json deleted file mode 100644 index 14c76f8513..0000000000 --- a/syntaxes/build/tsconfig.json +++ /dev/null @@ -1,10 +0,0 @@ 
-{ - "compilerOptions": { - "target": "ES5", - "module": "commonjs", - "noImplicitAny": true - }, - "exclude": [ - "node_modules" - ] -} \ No newline at end of file diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index 2ca4673bce..594d8106f9 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -8,7 +8,7 @@ uuid: f7de61e2-bdde-4e2a-a139-8221b179584e # TODO List # # * Refinement and tests to ensure proper highlighting while typing -# * lambda expressions and anonymous functions +# * anonymous methods # * is and as cast expressions # * null coalescing operator # * null propagating operator @@ -349,7 +349,7 @@ repository: - include: '#punctuation-comma' - begin: '[_[:alpha:]][_[:alnum:]]*' beginCaptures: - '0': { name: variable.other.enummember.cs } + '0': { name: entity.name.variable.enum-member.cs } end: (?=(,|\})) patterns: - include: '#comment' @@ -1064,7 +1064,7 @@ repository: # '4': ? is a sub-expression. It's final value is not considered. # '5': ? is a sub-expression. It's final value is not considered. # '6': ? is a sub-expression. It's final value is not considered. - '7': { name: variable.local.cs } + '7': { name: entity.name.variable.local.cs } '8': { name: keyword.control.loop.in.cs } - include: '#expression' - include: '#statement' @@ -1129,7 +1129,7 @@ repository: # '3': ? is a sub-expression. It's final value is not considered. # '4': ? is a sub-expression. It's final value is not considered. # '5': ? is a sub-expression. It's final value is not considered. - '6': { name: variable.local.cs } + '6': { name: entity.name.variable.local.cs } - include: '#when-clause' - include: '#block' @@ -1228,10 +1228,10 @@ repository: # '4': ? is a sub-expression. It's final value is not considered. # '5': ? is a sub-expression. It's final value is not considered. # '6': ? is a sub-expression. It's final value is not considered. 
- '7': { name: variable.local.cs } + '7': { name: entity.name.variable.local.cs } end: (?=;|\)) patterns: - - name: variable.local.cs + - name: entity.name.variable.local.cs match: '[_[:alpha:]][_[:alnum:]]*' - include: '#punctuation-comma' - include: '#comment' @@ -1266,10 +1266,10 @@ repository: # '4': ? is a sub-expression. It's final value is not considered. # '5': ? is a sub-expression. It's final value is not considered. # '6': ? is a sub-expression. It's final value is not considered. - '7': { name: variable.local.cs } + '7': { name: entity.name.variable.local.cs } end: (?=;) patterns: - - name: variable.local.cs + - name: entity.name.variable.local.cs match: '[_[:alpha:]][_[:alnum:]]*' - include: '#punctuation-comma' - include: '#comment' @@ -1686,7 +1686,7 @@ repository: # parameter name - match: \s+([_[:alpha:]][_[:alnum:]]*)\s*(?=[,\]]) captures: - '1': { name: variable.parameter.cs } + '1': { name: entity.name.variable.parameter.cs } - include: '#variable-initializer' - include: '#type' - include: '#punctuation-comma' @@ -1706,7 +1706,7 @@ repository: # parameter name - match: \s+([_[:alpha:]][_[:alnum:]]*)\s*(?=[,)]) captures: - '1': { name: variable.parameter.cs } + '1': { name: entity.name.variable.parameter.cs } - include: '#variable-initializer' - include: '#type' - include: '#punctuation-comma' @@ -1738,7 +1738,7 @@ repository: named-argument: begin: ([_[:alpha:]][_[:alnum:]]*)\s*(:) beginCaptures: - '1': { name: variable.parameter.cs } + '1': { name: entity.name.variable.parameter.cs } '2': { name: punctuation.separator.colon.cs } end: (?=(,|\)|\])) patterns: @@ -1939,7 +1939,7 @@ repository: (=>) beginCaptures: '1': { name: storage.modifier.cs } - '2': { name: variable.parameter.cs } + '2': { name: entity.name.variable.parameter.cs } '3': { name: keyword.operator.arrow.cs } end: (?=\)|;) patterns: @@ -2010,7 +2010,7 @@ repository: # '3': ? is a sub-expression. It's final value is not considered. # '4': ? is a sub-expression. 
It's final value is not considered. # '5': ? is a sub-expression. It's final value is not considered. - '6': { name: entity.name.variable.tuple.cs } + '6': { name: entity.name.variable.tuple-element.cs } type-builtin: match: \b(bool|byte|char|decimal|double|float|int|long|object|sbyte|short|string|uint|ulong|ushort|void)\b diff --git a/test/syntaxes/constructors.test.syntax.ts b/test/syntaxes/constructors.test.syntax.ts index 57fc720ce7..553ec14ef4 100644 --- a/test/syntaxes/constructors.test.syntax.ts +++ b/test/syntaxes/constructors.test.syntax.ts @@ -44,7 +44,7 @@ describe("Grammar", () => { Token.Identifiers.MethodName("TestClass"), Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Parameter("x"), + Token.Identifiers.ParameterName("x"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, Token.Punctuation.CloseBrace]); @@ -60,7 +60,7 @@ describe("Grammar", () => { Token.Punctuation.OpenParen, Token.Keywords.Modifiers.Ref, Token.PrimitiveType.Int, - Token.Variables.Parameter("x"), + Token.Identifiers.ParameterName("x"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, Token.Punctuation.CloseBrace]); @@ -77,10 +77,10 @@ TestClass(int x, int y) Token.Identifiers.MethodName("TestClass"), Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Parameter("x"), + Token.Identifiers.ParameterName("x"), Token.Punctuation.Comma, Token.PrimitiveType.Int, - Token.Variables.Parameter("y"), + Token.Identifiers.ParameterName("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, Token.Punctuation.CloseBrace]); @@ -94,10 +94,10 @@ TestClass(int x, int y) Token.Identifiers.MethodName("TestClass"), Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Parameter("x"), + Token.Identifiers.ParameterName("x"), Token.Punctuation.Comma, Token.PrimitiveType.Int, - Token.Variables.Parameter("y"), + Token.Identifiers.ParameterName("y"), Token.Punctuation.CloseParen, Token.Operators.Arrow, 
Token.Identifiers.MethodName("Foo"), @@ -161,7 +161,7 @@ TestClass(int x, int y) Token.Identifiers.MethodName("TestClass"), Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Parameter("x"), + Token.Identifiers.ParameterName("x"), Token.Punctuation.CloseParen, Token.Punctuation.Colon, Token.Keywords.This, @@ -181,12 +181,12 @@ TestClass(int x, int y) Token.Identifiers.MethodName("TestClass"), Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Parameter("x"), + Token.Identifiers.ParameterName("x"), Token.Punctuation.CloseParen, Token.Punctuation.Colon, Token.Keywords.This, Token.Punctuation.OpenParen, - Token.Variables.Parameter("y"), + Token.Identifiers.ParameterName("y"), Token.Punctuation.Colon, Token.Variables.ReadWrite("x"), Token.Punctuation.CloseParen, @@ -247,7 +247,7 @@ internal WaitHandle(Task self, TT.Task /*task) Token.Identifiers.MethodName("WaitHandle"), Token.Punctuation.OpenParen, Token.Type("Task"), - Token.Variables.Parameter("self"), + Token.Identifiers.ParameterName("self"), Token.Punctuation.Comma, Token.Type("TT"), Token.Punctuation.Accessor, @@ -315,19 +315,19 @@ public class A Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, Token.Keywords.Var, - Token.Variables.Local("a"), + Token.Identifiers.LocalName("a"), Token.Operators.Assignment, Token.Literals.Numeric.Decimal("1"), Token.Punctuation.Semicolon, Token.Keywords.Var, - Token.Variables.Local("b"), + Token.Identifiers.LocalName("b"), Token.Operators.Assignment, Token.Punctuation.String.Begin, Token.Literals.String("abc"), Token.Punctuation.String.End, Token.Punctuation.Semicolon, Token.Keywords.Var, - Token.Variables.Local("c"), + Token.Identifiers.LocalName("c"), Token.Operators.Assignment, Token.Keywords.New, Token.Type("B"), @@ -338,7 +338,7 @@ public class A Token.Punctuation.CloseParen, Token.Punctuation.Semicolon, Token.Keywords.Var, - Token.Variables.Local("c"), + Token.Identifiers.LocalName("c"), Token.Operators.Assignment, 
Token.Keywords.New, Token.Type("B"), diff --git a/test/syntaxes/delegates.test.syntax.ts b/test/syntaxes/delegates.test.syntax.ts index 9bf09bd072..5ca86f7273 100644 --- a/test/syntaxes/delegates.test.syntax.ts +++ b/test/syntaxes/delegates.test.syntax.ts @@ -42,7 +42,7 @@ describe("Grammar", () => { Token.Punctuation.TypeParameters.End, Token.Punctuation.OpenParen, Token.Type("T"), - Token.Variables.Parameter("arg1"), + Token.Identifiers.ParameterName("arg1"), Token.Punctuation.CloseParen, Token.Punctuation.Semicolon]); }); @@ -111,17 +111,17 @@ delegate void D() Token.Punctuation.OpenParen, Token.Keywords.Modifiers.Ref, Token.PrimitiveType.String, - Token.Variables.Parameter("x"), + Token.Identifiers.ParameterName("x"), Token.Punctuation.Comma, Token.Keywords.Modifiers.Out, Token.PrimitiveType.Int, - Token.Variables.Parameter("y"), + Token.Identifiers.ParameterName("y"), Token.Punctuation.Comma, Token.Keywords.Modifiers.Params, Token.PrimitiveType.Object, Token.Punctuation.OpenBracket, Token.Punctuation.CloseBracket, - Token.Variables.Parameter("z"), + Token.Identifiers.ParameterName("z"), Token.Punctuation.CloseParen, Token.Punctuation.Semicolon]); }); diff --git a/test/syntaxes/enums.test.syntax.ts b/test/syntaxes/enums.test.syntax.ts index 868c3cebd0..e8efe4f433 100644 --- a/test/syntaxes/enums.test.syntax.ts +++ b/test/syntaxes/enums.test.syntax.ts @@ -45,7 +45,7 @@ describe("Grammar", () => { Token.Keywords.Enum, Token.Identifiers.EnumName("E"), Token.Punctuation.OpenBrace, - Token.Variables.EnumMember("M1"), + Token.Identifiers.EnumMemberName("M1"), Token.Punctuation.CloseBrace]); }); @@ -58,11 +58,11 @@ describe("Grammar", () => { Token.Keywords.Enum, Token.Identifiers.EnumName("Color"), Token.Punctuation.OpenBrace, - Token.Variables.EnumMember("Red"), + Token.Identifiers.EnumMemberName("Red"), Token.Punctuation.Comma, - Token.Variables.EnumMember("Green"), + Token.Identifiers.EnumMemberName("Green"), Token.Punctuation.Comma, - 
Token.Variables.EnumMember("Blue"), + Token.Identifiers.EnumMemberName("Blue"), Token.Punctuation.CloseBrace]); }); @@ -83,13 +83,13 @@ enum E Token.Keywords.Enum, Token.Identifiers.EnumName("E"), Token.Punctuation.OpenBrace, - Token.Variables.EnumMember("Value1"), + Token.Identifiers.EnumMemberName("Value1"), Token.Operators.Assignment, Token.Literals.Numeric.Decimal("1"), Token.Punctuation.Comma, - Token.Variables.EnumMember("Value2"), + Token.Identifiers.EnumMemberName("Value2"), Token.Punctuation.Comma, - Token.Variables.EnumMember("Value3"), + Token.Identifiers.EnumMemberName("Value3"), Token.Punctuation.CloseBrace]); }); @@ -128,13 +128,13 @@ public class TestClass2 Token.Keywords.Enum, Token.Identifiers.EnumName("TestEnum"), Token.Punctuation.OpenBrace, - Token.Variables.EnumMember("enum1"), + Token.Identifiers.EnumMemberName("enum1"), Token.Punctuation.Comma, - Token.Variables.EnumMember("enum2"), + Token.Identifiers.EnumMemberName("enum2"), Token.Punctuation.Comma, - Token.Variables.EnumMember("enum3"), + Token.Identifiers.EnumMemberName("enum3"), Token.Punctuation.Comma, - Token.Variables.EnumMember("enum4"), + Token.Identifiers.EnumMemberName("enum4"), Token.Punctuation.CloseBrace, Token.Keywords.Modifiers.Public, @@ -147,11 +147,11 @@ public class TestClass2 Token.Keywords.Enum, Token.Identifiers.EnumName("TestEnum2"), Token.Punctuation.OpenBrace, - Token.Variables.EnumMember("enum1"), + Token.Identifiers.EnumMemberName("enum1"), Token.Operators.Assignment, Token.Literals.Numeric.Decimal("10"), Token.Punctuation.Comma, - Token.Variables.EnumMember("enum2"), + Token.Identifiers.EnumMemberName("enum2"), Token.Operators.Assignment, Token.Literals.Numeric.Decimal("15"), Token.Punctuation.Comma, diff --git a/test/syntaxes/expressions.test.syntax.ts b/test/syntaxes/expressions.test.syntax.ts index e5aa679d74..2abf47a3f8 100644 --- a/test/syntaxes/expressions.test.syntax.ts +++ b/test/syntaxes/expressions.test.syntax.ts @@ -17,7 +17,7 @@ describe("Grammar", () 
=> { tokens.should.deep.equal([ Token.Type("Action"), - Token.Variables.Local("a"), + Token.Identifiers.LocalName("a"), Token.Operators.Assignment, Token.Punctuation.OpenParen, Token.Punctuation.CloseParen, @@ -37,7 +37,7 @@ describe("Grammar", () => { Token.Punctuation.TypeParameters.Begin, Token.Type("Task"), Token.Punctuation.TypeParameters.End, - Token.Variables.Local("a"), + Token.Identifiers.LocalName("a"), Token.Operators.Assignment, Token.Keywords.Modifiers.Async, Token.Punctuation.OpenParen, @@ -58,9 +58,9 @@ describe("Grammar", () => { Token.Punctuation.TypeParameters.Begin, Token.PrimitiveType.Int, Token.Punctuation.TypeParameters.End, - Token.Variables.Local("a"), + Token.Identifiers.LocalName("a"), Token.Operators.Assignment, - Token.Variables.Parameter("x"), + Token.Identifiers.ParameterName("x"), Token.Operators.Arrow, Token.Punctuation.OpenBrace, Token.Punctuation.CloseBrace, @@ -79,10 +79,10 @@ describe("Grammar", () => { Token.Punctuation.Comma, Token.Type("Task"), Token.Punctuation.TypeParameters.End, - Token.Variables.Local("a"), + Token.Identifiers.LocalName("a"), Token.Operators.Assignment, Token.Keywords.Modifiers.Async, - Token.Variables.Parameter("x"), + Token.Identifiers.ParameterName("x"), Token.Operators.Arrow, Token.Punctuation.OpenBrace, Token.Punctuation.CloseBrace, @@ -99,11 +99,11 @@ describe("Grammar", () => { Token.Punctuation.TypeParameters.Begin, Token.PrimitiveType.Int, Token.Punctuation.TypeParameters.End, - Token.Variables.Local("a"), + Token.Identifiers.LocalName("a"), Token.Operators.Assignment, Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Parameter("x"), + Token.Identifiers.ParameterName("x"), Token.Punctuation.CloseParen, Token.Operators.Arrow, Token.Punctuation.OpenBrace, @@ -123,12 +123,12 @@ describe("Grammar", () => { Token.Punctuation.Comma, Token.Type("Task"), Token.Punctuation.TypeParameters.End, - Token.Variables.Local("a"), + Token.Identifiers.LocalName("a"), Token.Operators.Assignment, 
Token.Keywords.Modifiers.Async, Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Parameter("x"), + Token.Identifiers.ParameterName("x"), Token.Punctuation.CloseParen, Token.Operators.Arrow, Token.Punctuation.OpenBrace, @@ -148,14 +148,14 @@ describe("Grammar", () => { Token.Punctuation.Comma, Token.PrimitiveType.Int, Token.Punctuation.TypeParameters.End, - Token.Variables.Local("a"), + Token.Identifiers.LocalName("a"), Token.Operators.Assignment, Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Parameter("x"), + Token.Identifiers.ParameterName("x"), Token.Punctuation.Comma, Token.PrimitiveType.Int, - Token.Variables.Parameter("y"), + Token.Identifiers.ParameterName("y"), Token.Punctuation.CloseParen, Token.Operators.Arrow, Token.Punctuation.OpenBrace, @@ -177,15 +177,15 @@ describe("Grammar", () => { Token.Punctuation.Comma, Token.Type("Task"), Token.Punctuation.TypeParameters.End, - Token.Variables.Local("a"), + Token.Identifiers.LocalName("a"), Token.Operators.Assignment, Token.Keywords.Modifiers.Async, Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Parameter("x"), + Token.Identifiers.ParameterName("x"), Token.Punctuation.Comma, Token.PrimitiveType.Int, - Token.Variables.Parameter("y"), + Token.Identifiers.ParameterName("y"), Token.Punctuation.CloseParen, Token.Operators.Arrow, Token.Punctuation.OpenBrace, @@ -236,7 +236,7 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Identifiers.MethodName("M"), Token.Punctuation.OpenParen, - Token.Variables.Parameter("x"), + Token.Identifiers.ParameterName("x"), Token.Operators.Arrow, Token.Punctuation.OpenBrace, Token.Punctuation.CloseBrace, @@ -253,7 +253,7 @@ describe("Grammar", () => { Token.Identifiers.MethodName("M"), Token.Punctuation.OpenParen, Token.Keywords.Modifiers.Async, - Token.Variables.Parameter("x"), + Token.Identifiers.ParameterName("x"), Token.Operators.Arrow, Token.Punctuation.OpenBrace, Token.Punctuation.CloseBrace, @@ 
-271,7 +271,7 @@ describe("Grammar", () => { Token.Punctuation.OpenParen, Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Parameter("x"), + Token.Identifiers.ParameterName("x"), Token.Punctuation.CloseParen, Token.Operators.Arrow, Token.Punctuation.OpenBrace, @@ -291,7 +291,7 @@ describe("Grammar", () => { Token.Keywords.Modifiers.Async, Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Parameter("x"), + Token.Identifiers.ParameterName("x"), Token.Punctuation.CloseParen, Token.Operators.Arrow, Token.Punctuation.OpenBrace, @@ -310,10 +310,10 @@ describe("Grammar", () => { Token.Punctuation.OpenParen, Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Parameter("x"), + Token.Identifiers.ParameterName("x"), Token.Punctuation.Comma, Token.PrimitiveType.Int, - Token.Variables.Parameter("y"), + Token.Identifiers.ParameterName("y"), Token.Punctuation.CloseParen, Token.Operators.Arrow, Token.Punctuation.OpenBrace, @@ -333,10 +333,10 @@ describe("Grammar", () => { Token.Keywords.Modifiers.Async, Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Parameter("x"), + Token.Identifiers.ParameterName("x"), Token.Punctuation.Comma, Token.PrimitiveType.Int, - Token.Variables.Parameter("y"), + Token.Identifiers.ParameterName("y"), Token.Punctuation.CloseParen, Token.Operators.Arrow, Token.Punctuation.OpenBrace, @@ -408,7 +408,7 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Var, - Token.Variables.Local("o"), + Token.Identifiers.LocalName("o"), Token.Operators.Assignment, Token.Punctuation.OpenParen, Token.PrimitiveType.Object, @@ -424,7 +424,7 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Var, - Token.Variables.Local("o"), + Token.Identifiers.LocalName("o"), Token.Operators.Assignment, Token.Punctuation.OpenParen, Token.Type("C"), @@ -480,7 +480,7 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.PrimitiveType.Int, - 
Token.Variables.Local("x"), + Token.Identifiers.LocalName("x"), Token.Operators.Assignment, Token.Keywords.Checked, Token.Punctuation.OpenParen, @@ -496,7 +496,7 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.PrimitiveType.Int, - Token.Variables.Local("x"), + Token.Identifiers.LocalName("x"), Token.Operators.Assignment, Token.Keywords.Unchecked, Token.Punctuation.OpenParen, @@ -560,7 +560,7 @@ class C tokens.should.deep.equal([ Token.Keywords.Var, - Token.Variables.Local("o"), + Token.Identifiers.LocalName("o"), Token.Operators.Assignment, Token.Variables.Property("P"), Token.Punctuation.OpenBracket, @@ -575,7 +575,7 @@ class C tokens.should.deep.equal([ Token.Keywords.Var, - Token.Variables.Local("o"), + Token.Identifiers.LocalName("o"), Token.Operators.Assignment, Token.Variables.Property("P"), Token.Punctuation.OpenBracket, @@ -591,7 +591,7 @@ class C tokens.should.deep.equal([ Token.Keywords.Var, - Token.Variables.Local("o"), + Token.Identifiers.LocalName("o"), Token.Operators.Assignment, Token.Variables.Property("P"), Token.Punctuation.OpenBracket, @@ -609,15 +609,15 @@ class C tokens.should.deep.equal([ Token.Keywords.Var, - Token.Variables.Local("o"), + Token.Identifiers.LocalName("o"), Token.Operators.Assignment, Token.Variables.Property("P"), Token.Punctuation.OpenBracket, - Token.Variables.Parameter("x"), + Token.Identifiers.ParameterName("x"), Token.Punctuation.Colon, Token.Literals.Numeric.Decimal("19"), Token.Punctuation.Comma, - Token.Variables.Parameter("y"), + Token.Identifiers.ParameterName("y"), Token.Punctuation.Colon, Token.Literals.Numeric.Decimal("23"), Token.Punctuation.CloseBracket, @@ -631,7 +631,7 @@ class C tokens.should.deep.equal([ Token.Keywords.Var, - Token.Variables.Local("o"), + Token.Identifiers.LocalName("o"), Token.Operators.Assignment, Token.Variables.Property("P"), Token.Punctuation.OpenBracket, @@ -648,7 +648,7 @@ class C tokens.should.deep.equal([ Token.Keywords.Var, - Token.Variables.Local("o"), + 
Token.Identifiers.LocalName("o"), Token.Operators.Assignment, Token.Variables.Property("P"), Token.Punctuation.OpenBracket, @@ -665,7 +665,7 @@ class C tokens.should.deep.equal([ Token.Keywords.Var, - Token.Variables.Local("o"), + Token.Identifiers.LocalName("o"), Token.Operators.Assignment, Token.Variables.Object("C"), Token.Punctuation.TypeParameters.Begin, @@ -685,7 +685,7 @@ class C tokens.should.deep.equal([ Token.Keywords.Var, - Token.Variables.Local("o"), + Token.Identifiers.LocalName("o"), Token.Operators.Assignment, Token.Variables.Object("N"), Token.Punctuation.Accessor, @@ -712,7 +712,7 @@ a1[1] = ((this.a)); a1[2] = (c); a1[1] = (i); Token.PrimitiveType.Object, Token.Punctuation.OpenBracket, Token.Punctuation.CloseBracket, - Token.Variables.Local("a1"), + Token.Identifiers.LocalName("a1"), Token.Operators.Assignment, Token.Punctuation.OpenBrace, Token.Punctuation.OpenParen, @@ -812,11 +812,11 @@ a1[1] = ((this.a)); a1[2] = (c); a1[1] = (i); tokens.should.deep.equal([ Token.Identifiers.MethodName("M"), Token.Punctuation.OpenParen, - Token.Variables.Parameter("x"), + Token.Identifiers.ParameterName("x"), Token.Punctuation.Colon, Token.Literals.Numeric.Decimal("19"), Token.Punctuation.Comma, - Token.Variables.Parameter("y"), + Token.Identifiers.ParameterName("y"), Token.Punctuation.Colon, Token.Literals.Numeric.Decimal("23"), Token.Punctuation.CloseParen, @@ -948,7 +948,7 @@ a1[1] = ((this.a)); a1[2] = (c); a1[1] = (i); tokens.should.deep.equal([ Token.Keywords.Var, - Token.Variables.Local("o"), + Token.Identifiers.LocalName("o"), Token.Operators.Assignment, Token.Variables.Object("N"), Token.Punctuation.Accessor, @@ -970,7 +970,7 @@ a1[1] = ((this.a)); a1[2] = (c); a1[1] = (i); tokens.should.deep.equal([ Token.Keywords.Var, - Token.Variables.Local("o"), + Token.Identifiers.LocalName("o"), Token.Operators.Assignment, Token.Variables.Object("N"), Token.Punctuation.Accessor, @@ -989,7 +989,7 @@ a1[1] = ((this.a)); a1[2] = (c); a1[1] = (i); 
tokens.should.deep.equal([ Token.Keywords.Var, - Token.Variables.Local("o"), + Token.Identifiers.LocalName("o"), Token.Operators.Assignment, Token.Keywords.This, Token.Punctuation.Accessor, @@ -1008,15 +1008,15 @@ a1[1] = ((this.a)); a1[2] = (c); a1[1] = (i); tokens.should.deep.equal([ Token.Keywords.Var, - Token.Variables.Local("o"), + Token.Identifiers.LocalName("o"), Token.Operators.Assignment, Token.Identifiers.MethodName("M"), Token.Punctuation.OpenParen, - Token.Variables.Parameter("x"), + Token.Identifiers.ParameterName("x"), Token.Punctuation.Colon, Token.Literals.Numeric.Decimal("19"), Token.Punctuation.Comma, - Token.Variables.Parameter("y"), + Token.Identifiers.ParameterName("y"), Token.Punctuation.Colon, Token.Literals.Numeric.Decimal("23"), Token.Punctuation.CloseParen, @@ -1032,7 +1032,7 @@ a1[1] = ((this.a)); a1[2] = (c); a1[1] = (i); tokens.should.deep.equal([ Token.Keywords.Var, - Token.Variables.Local("t"), + Token.Identifiers.LocalName("t"), Token.Operators.Assignment, Token.Keywords.Default, Token.Punctuation.OpenParen, @@ -1050,7 +1050,7 @@ a1[1] = ((this.a)); a1[2] = (c); a1[1] = (i); tokens.should.deep.equal([ Token.Keywords.Var, - Token.Variables.Local("t"), + Token.Identifiers.LocalName("t"), Token.Operators.Assignment, Token.Keywords.TypeOf, Token.Punctuation.OpenParen, @@ -1070,7 +1070,7 @@ a1[1] = ((this.a)); a1[2] = (c); a1[1] = (i); tokens.should.deep.equal([ Token.Keywords.Var, - Token.Variables.Local("q"), + Token.Identifiers.LocalName("q"), Token.Operators.Assignment, Token.Keywords.Queries.From, Token.Identifiers.RangeVariableName("n"), @@ -1085,7 +1085,7 @@ a1[1] = ((this.a)); a1[2] = (c); a1[1] = (i); tokens.should.deep.equal([ Token.Keywords.Var, - Token.Variables.Local("q"), + Token.Identifiers.LocalName("q"), Token.Operators.Assignment, Token.Keywords.Queries.From, Token.PrimitiveType.Int, @@ -1104,7 +1104,7 @@ var q = from x in list1 tokens.should.deep.equal([ Token.Keywords.Var, - Token.Variables.Local("q"), + 
Token.Identifiers.LocalName("q"), Token.Operators.Assignment, Token.Keywords.Queries.From, Token.Identifiers.RangeVariableName("x"), @@ -1126,7 +1126,7 @@ var q = from c in customers tokens.should.deep.equal([ Token.Keywords.Var, - Token.Variables.Local("q"), + Token.Identifiers.LocalName("q"), Token.Operators.Assignment, Token.Keywords.Queries.From, Token.Identifiers.RangeVariableName("c"), @@ -1156,7 +1156,7 @@ var q = from c in customers tokens.should.deep.equal([ Token.Keywords.Var, - Token.Variables.Local("q"), + Token.Identifiers.LocalName("q"), Token.Operators.Assignment, Token.Keywords.Queries.From, Token.Identifiers.RangeVariableName("c"), @@ -1188,7 +1188,7 @@ var q = from o in orders tokens.should.deep.equal([ Token.Keywords.Var, - Token.Variables.Local("q"), + Token.Identifiers.LocalName("q"), Token.Operators.Assignment, Token.Keywords.Queries.From, Token.Identifiers.RangeVariableName("o"), @@ -1216,7 +1216,7 @@ var q = from o in orders tokens.should.deep.equal([ Token.Keywords.Var, - Token.Variables.Local("q"), + Token.Identifiers.LocalName("q"), Token.Operators.Assignment, Token.Keywords.Queries.From, Token.Identifiers.RangeVariableName("o"), @@ -1245,7 +1245,7 @@ var q = from o in orders tokens.should.deep.equal([ Token.Keywords.Var, - Token.Variables.Local("q"), + Token.Identifiers.LocalName("q"), Token.Operators.Assignment, Token.Keywords.Queries.From, Token.Identifiers.RangeVariableName("o"), @@ -1273,7 +1273,7 @@ var q = from n in numbers tokens.should.deep.equal([ Token.Keywords.Var, - Token.Variables.Local("q"), + Token.Identifiers.LocalName("q"), Token.Operators.Assignment, Token.Keywords.Queries.From, Token.Identifiers.RangeVariableName("n"), @@ -1293,7 +1293,7 @@ var q = from n in new[] { 1, 3, 5, 7, 9 } tokens.should.deep.equal([ Token.Keywords.Var, - Token.Variables.Local("q"), + Token.Identifiers.LocalName("q"), Token.Operators.Assignment, Token.Keywords.Queries.From, Token.Identifiers.RangeVariableName("n"), @@ -1330,7 +1330,7 @@ var q = 
from c in customers tokens.should.deep.equal([ Token.Keywords.Var, - Token.Variables.Local("q"), + Token.Identifiers.LocalName("q"), Token.Operators.Assignment, Token.Keywords.Queries.From, Token.Identifiers.RangeVariableName("c"), diff --git a/test/syntaxes/fields.test.syntax.ts b/test/syntaxes/fields.test.syntax.ts index e0b82f290f..be58b76f15 100644 --- a/test/syntaxes/fields.test.syntax.ts +++ b/test/syntaxes/fields.test.syntax.ts @@ -195,10 +195,10 @@ const bool field = true;`); tokens.should.deep.equal([ Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Tuple("x"), + Token.Identifiers.TupleElementName("x"), Token.Punctuation.Comma, Token.PrimitiveType.Int, - Token.Variables.Tuple("y"), + Token.Identifiers.TupleElementName("y"), Token.Punctuation.CloseParen, Token.Identifiers.FieldName("z"), Token.Punctuation.Semicolon]); @@ -213,10 +213,10 @@ const bool field = true;`); Token.Keywords.Modifiers.Private, Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Tuple("x"), + Token.Identifiers.TupleElementName("x"), Token.Punctuation.Comma, Token.PrimitiveType.Int, - Token.Variables.Tuple("y"), + Token.Identifiers.TupleElementName("y"), Token.Punctuation.CloseParen, Token.Identifiers.FieldName("z"), Token.Punctuation.Semicolon]); diff --git a/test/syntaxes/indexers.test.syntax.ts b/test/syntaxes/indexers.test.syntax.ts index feb6efce05..1f1f18dc98 100644 --- a/test/syntaxes/indexers.test.syntax.ts +++ b/test/syntaxes/indexers.test.syntax.ts @@ -26,7 +26,7 @@ public string this[int index] Token.Keywords.This, Token.Punctuation.OpenBracket, Token.PrimitiveType.Int, - Token.Variables.Parameter("index"), + Token.Identifiers.ParameterName("index"), Token.Punctuation.CloseBracket, Token.Punctuation.OpenBrace, Token.Keywords.Get, @@ -57,7 +57,7 @@ public string this[int index] Token.Keywords.This, Token.Punctuation.OpenBracket, Token.PrimitiveType.Int, - Token.Variables.Parameter("index"), + Token.Identifiers.ParameterName("index"), 
Token.Punctuation.CloseBracket, Token.Punctuation.Semicolon]); }); @@ -72,7 +72,7 @@ public string this[int index] Token.Keywords.This, Token.Punctuation.OpenBracket, Token.PrimitiveType.Int, - Token.Variables.Parameter("index"), + Token.Identifiers.ParameterName("index"), Token.Punctuation.CloseBracket, Token.Punctuation.OpenBrace, Token.Keywords.Get, @@ -92,7 +92,7 @@ public string this[int index] Token.Keywords.This, Token.Punctuation.OpenBracket, Token.PrimitiveType.Int, - Token.Variables.Parameter("index"), + Token.Identifiers.ParameterName("index"), Token.Punctuation.CloseBracket, Token.Punctuation.OpenBrace, Token.Keywords.Get, @@ -110,7 +110,7 @@ public string this[int index] Token.Keywords.This, Token.Punctuation.OpenBracket, Token.PrimitiveType.Int, - Token.Variables.Parameter("index"), + Token.Identifiers.ParameterName("index"), Token.Punctuation.CloseBracket, Token.Punctuation.OpenBrace, Token.Keywords.Set, diff --git a/test/syntaxes/literals.test.syntax.ts b/test/syntaxes/literals.test.syntax.ts index 8d88a74123..c3a598d07b 100644 --- a/test/syntaxes/literals.test.syntax.ts +++ b/test/syntaxes/literals.test.syntax.ts @@ -43,7 +43,7 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Var, - Token.Variables.Local("x"), + Token.Identifiers.LocalName("x"), Token.Operators.Assignment, Token.Punctuation.Char.Begin, Token.Punctuation.Char.End, @@ -56,7 +56,7 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Var, - Token.Variables.Local("x"), + Token.Identifiers.LocalName("x"), Token.Operators.Assignment, Token.Punctuation.Char.Begin, Token.Literals.Char("a"), @@ -70,7 +70,7 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Var, - Token.Variables.Local("x"), + Token.Identifiers.LocalName("x"), Token.Operators.Assignment, Token.Punctuation.Char.Begin, Token.Literals.CharacterEscape("\\'"), diff --git a/test/syntaxes/locals.test.syntax.ts b/test/syntaxes/locals.test.syntax.ts index 
30e9b37213..a82fb4b228 100644 --- a/test/syntaxes/locals.test.syntax.ts +++ b/test/syntaxes/locals.test.syntax.ts @@ -16,7 +16,7 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.PrimitiveType.Int, - Token.Variables.Local("x"), + Token.Identifiers.LocalName("x"), Token.Punctuation.Semicolon ]); }); @@ -27,7 +27,7 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.PrimitiveType.Int, - Token.Variables.Local("x"), + Token.Identifiers.LocalName("x"), Token.Operators.Assignment, Token.Literals.Numeric.Decimal("42"), Token.Punctuation.Semicolon @@ -40,9 +40,9 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.PrimitiveType.Int, - Token.Variables.Local("x"), + Token.Identifiers.LocalName("x"), Token.Punctuation.Comma, - Token.Variables.Local("y"), + Token.Identifiers.LocalName("y"), Token.Punctuation.Semicolon ]); }); @@ -53,11 +53,11 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.PrimitiveType.Int, - Token.Variables.Local("x"), + Token.Identifiers.LocalName("x"), Token.Operators.Assignment, Token.Literals.Numeric.Decimal("19"), Token.Punctuation.Comma, - Token.Variables.Local("y"), + Token.Identifiers.LocalName("y"), Token.Operators.Assignment, Token.Literals.Numeric.Decimal("23"), Token.Punctuation.Semicolon @@ -71,7 +71,7 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Modifiers.Const, Token.PrimitiveType.Int, - Token.Variables.Local("x"), + Token.Identifiers.LocalName("x"), Token.Operators.Assignment, Token.Literals.Numeric.Decimal("42"), Token.Punctuation.Semicolon @@ -85,11 +85,11 @@ describe("Grammar", () => { tokens.should.deep.equal([ Token.Keywords.Modifiers.Const, Token.PrimitiveType.Int, - Token.Variables.Local("x"), + Token.Identifiers.LocalName("x"), Token.Operators.Assignment, Token.Literals.Numeric.Decimal("19"), Token.Punctuation.Comma, - Token.Variables.Local("y"), + Token.Identifiers.LocalName("y"), Token.Operators.Assignment, Token.Literals.Numeric.Decimal("23"), 
Token.Punctuation.Semicolon diff --git a/test/syntaxes/methods.test.syntax.ts b/test/syntaxes/methods.test.syntax.ts index b7b5eab7dd..4d802a37c2 100644 --- a/test/syntaxes/methods.test.syntax.ts +++ b/test/syntaxes/methods.test.syntax.ts @@ -38,10 +38,10 @@ int Add(int x, int y) Token.Identifiers.MethodName("Add"), Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Parameter("x"), + Token.Identifiers.ParameterName("x"), Token.Punctuation.Comma, Token.PrimitiveType.Int, - Token.Variables.Parameter("y"), + Token.Identifiers.ParameterName("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, Token.Keywords.Control.Return, @@ -67,7 +67,7 @@ int Add(int x, int y) Token.Punctuation.TypeParameters.End, Token.Punctuation.OpenParen, Token.Type("T"), - Token.Variables.Parameter("arg"), + Token.Identifiers.ParameterName("arg"), Token.Punctuation.CloseParen, Token.Keywords.Where, Token.Type("T"), @@ -87,10 +87,10 @@ int Add(int x, int y) Token.Identifiers.MethodName("Add"), Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Parameter("x"), + Token.Identifiers.ParameterName("x"), Token.Punctuation.Comma, Token.PrimitiveType.Int, - Token.Variables.Parameter("y"), + Token.Identifiers.ParameterName("y"), Token.Punctuation.CloseParen, Token.Operators.Arrow, Token.Variables.ReadWrite("x"), @@ -140,13 +140,13 @@ int Add(int x, int y) Token.Identifiers.MethodName("GetString"), Token.Punctuation.OpenParen, Token.PrimitiveType.String, - Token.Variables.Parameter("format"), + Token.Identifiers.ParameterName("format"), Token.Punctuation.Comma, Token.Keywords.Modifiers.Params, Token.PrimitiveType.Object, Token.Punctuation.OpenBracket, Token.Punctuation.CloseBracket, - Token.Variables.Parameter("args"), + Token.Identifiers.ParameterName("args"), Token.Punctuation.CloseParen, Token.Punctuation.Semicolon]); }); @@ -166,7 +166,7 @@ int Add(int x, int y) Token.Punctuation.TypeParameters.End, Token.Punctuation.OpenParen, Token.Type("T"), - 
Token.Variables.Parameter("arg"), + Token.Identifiers.ParameterName("arg"), Token.Punctuation.CloseParen, Token.Keywords.Where, Token.Type("T"), @@ -218,14 +218,14 @@ int Add(int x, int y) Token.Identifiers.MethodName("methodWithParametersCommented"), Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Parameter("p1"), + Token.Identifiers.ParameterName("p1"), Token.Punctuation.Comma, Token.Comment.MultiLine.Start, Token.Comment.MultiLine.Text("int p2"), Token.Comment.MultiLine.End, Token.Punctuation.Comma, Token.PrimitiveType.Int, - Token.Variables.Parameter("p3"), + Token.Identifiers.ParameterName("p3"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, Token.Punctuation.CloseBrace @@ -254,7 +254,7 @@ public interface test Token.Punctuation.TypeParameters.Begin, Token.PrimitiveType.String, Token.Punctuation.TypeParameters.End, - Token.Variables.Parameter("blah"), + Token.Identifiers.ParameterName("blah"), Token.Punctuation.CloseParen, Token.Punctuation.Semicolon, Token.Type("Task"), @@ -267,7 +267,7 @@ public interface test Token.Punctuation.TypeParameters.Begin, Token.Type("T"), Token.Punctuation.TypeParameters.End, - Token.Variables.Parameter("blah"), + Token.Identifiers.ParameterName("blah"), Token.Punctuation.CloseParen, Token.Punctuation.Semicolon, Token.Punctuation.CloseBrace @@ -324,10 +324,10 @@ namespace Test Token.Identifiers.MethodName("AddToGoingUsers"), Token.Punctuation.OpenParen, Token.Type("Guid"), - Token.Variables.Parameter("id"), + Token.Identifiers.ParameterName("id"), Token.Punctuation.Comma, Token.PrimitiveType.String, - Token.Variables.Parameter("user"), + Token.Identifiers.ParameterName("user"), Token.Punctuation.CloseParen, Token.Operators.Arrow, Token.Variables.Object("_commandSender"), @@ -365,10 +365,10 @@ namespace Test Token.Identifiers.MethodName("AddToNotGoingUsers"), Token.Punctuation.OpenParen, Token.Type("Guid"), - Token.Variables.Parameter("id"), + Token.Identifiers.ParameterName("id"), 
Token.Punctuation.Comma, Token.PrimitiveType.String, - Token.Variables.Parameter("user"), + Token.Identifiers.ParameterName("user"), Token.Punctuation.CloseParen, Token.Operators.Arrow, Token.Variables.Object("_commandSender"), @@ -406,10 +406,10 @@ namespace Test Token.Identifiers.MethodName("AddToNotSureIfGoingUsers"), Token.Punctuation.OpenParen, Token.Type("Guid"), - Token.Variables.Parameter("id"), + Token.Identifiers.ParameterName("id"), Token.Punctuation.Comma, Token.PrimitiveType.String, - Token.Variables.Parameter("user"), + Token.Identifiers.ParameterName("user"), Token.Punctuation.CloseParen, Token.Operators.Arrow, Token.Variables.Object("_commandSender"), diff --git a/test/syntaxes/operators.test.syntax.ts b/test/syntaxes/operators.test.syntax.ts index d021fc269b..c4fd970f91 100644 --- a/test/syntaxes/operators.test.syntax.ts +++ b/test/syntaxes/operators.test.syntax.ts @@ -23,7 +23,7 @@ describe("Grammar", () => { Token.Identifiers.MethodName("+"), Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Parameter("value"), + Token.Identifiers.ParameterName("value"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, Token.Keywords.Control.Return, @@ -46,7 +46,7 @@ describe("Grammar", () => { Token.Identifiers.MethodName("-"), Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Parameter("value"), + Token.Identifiers.ParameterName("value"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, Token.Keywords.Control.Return, @@ -69,7 +69,7 @@ describe("Grammar", () => { Token.Identifiers.MethodName("!"), Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Parameter("value"), + Token.Identifiers.ParameterName("value"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, Token.Keywords.Control.Return, @@ -93,7 +93,7 @@ describe("Grammar", () => { Token.Identifiers.MethodName("~"), Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Parameter("value"), + 
Token.Identifiers.ParameterName("value"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, Token.Keywords.Control.Return, @@ -116,7 +116,7 @@ describe("Grammar", () => { Token.Identifiers.MethodName("++"), Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Parameter("value"), + Token.Identifiers.ParameterName("value"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, Token.Keywords.Control.Return, @@ -139,7 +139,7 @@ describe("Grammar", () => { Token.Identifiers.MethodName("--"), Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Parameter("value"), + Token.Identifiers.ParameterName("value"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, Token.Keywords.Control.Return, @@ -162,7 +162,7 @@ describe("Grammar", () => { Token.Identifiers.MethodName("true"), Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Parameter("value"), + Token.Identifiers.ParameterName("value"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, Token.Keywords.Control.Return, @@ -186,7 +186,7 @@ describe("Grammar", () => { Token.Identifiers.MethodName("false"), Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Parameter("value"), + Token.Identifiers.ParameterName("value"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, Token.Keywords.Control.Return, @@ -210,10 +210,10 @@ describe("Grammar", () => { Token.Identifiers.MethodName("+"), Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Parameter("x"), + Token.Identifiers.ParameterName("x"), Token.Punctuation.Comma, Token.PrimitiveType.Int, - Token.Variables.Parameter("y"), + Token.Identifiers.ParameterName("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, Token.Keywords.Control.Return, @@ -237,10 +237,10 @@ describe("Grammar", () => { Token.Identifiers.MethodName("-"), Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Parameter("x"), + 
Token.Identifiers.ParameterName("x"), Token.Punctuation.Comma, Token.PrimitiveType.Int, - Token.Variables.Parameter("y"), + Token.Identifiers.ParameterName("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, Token.Keywords.Control.Return, @@ -264,10 +264,10 @@ describe("Grammar", () => { Token.Identifiers.MethodName("*"), Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Parameter("x"), + Token.Identifiers.ParameterName("x"), Token.Punctuation.Comma, Token.PrimitiveType.Int, - Token.Variables.Parameter("y"), + Token.Identifiers.ParameterName("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, Token.Keywords.Control.Return, @@ -291,10 +291,10 @@ describe("Grammar", () => { Token.Identifiers.MethodName("/"), Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Parameter("x"), + Token.Identifiers.ParameterName("x"), Token.Punctuation.Comma, Token.PrimitiveType.Int, - Token.Variables.Parameter("y"), + Token.Identifiers.ParameterName("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, Token.Keywords.Control.Return, @@ -318,10 +318,10 @@ describe("Grammar", () => { Token.Identifiers.MethodName("%"), Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Parameter("x"), + Token.Identifiers.ParameterName("x"), Token.Punctuation.Comma, Token.PrimitiveType.Int, - Token.Variables.Parameter("y"), + Token.Identifiers.ParameterName("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, Token.Keywords.Control.Return, @@ -345,10 +345,10 @@ describe("Grammar", () => { Token.Identifiers.MethodName("&"), Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Parameter("x"), + Token.Identifiers.ParameterName("x"), Token.Punctuation.Comma, Token.PrimitiveType.Int, - Token.Variables.Parameter("y"), + Token.Identifiers.ParameterName("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, Token.Keywords.Control.Return, @@ -372,10 +372,10 @@ 
describe("Grammar", () => { Token.Identifiers.MethodName("|"), Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Parameter("x"), + Token.Identifiers.ParameterName("x"), Token.Punctuation.Comma, Token.PrimitiveType.Int, - Token.Variables.Parameter("y"), + Token.Identifiers.ParameterName("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, Token.Keywords.Control.Return, @@ -399,10 +399,10 @@ describe("Grammar", () => { Token.Identifiers.MethodName("^"), Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Parameter("x"), + Token.Identifiers.ParameterName("x"), Token.Punctuation.Comma, Token.PrimitiveType.Int, - Token.Variables.Parameter("y"), + Token.Identifiers.ParameterName("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, Token.Keywords.Control.Return, @@ -426,10 +426,10 @@ describe("Grammar", () => { Token.Identifiers.MethodName("<<"), Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Parameter("x"), + Token.Identifiers.ParameterName("x"), Token.Punctuation.Comma, Token.PrimitiveType.Int, - Token.Variables.Parameter("y"), + Token.Identifiers.ParameterName("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, Token.Keywords.Control.Return, @@ -453,10 +453,10 @@ describe("Grammar", () => { Token.Identifiers.MethodName(">>"), Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Parameter("x"), + Token.Identifiers.ParameterName("x"), Token.Punctuation.Comma, Token.PrimitiveType.Int, - Token.Variables.Parameter("y"), + Token.Identifiers.ParameterName("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, Token.Keywords.Control.Return, @@ -480,10 +480,10 @@ describe("Grammar", () => { Token.Identifiers.MethodName("=="), Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Parameter("x"), + Token.Identifiers.ParameterName("x"), Token.Punctuation.Comma, Token.PrimitiveType.Int, - Token.Variables.Parameter("y"), + 
Token.Identifiers.ParameterName("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, Token.Keywords.Control.Return, @@ -507,10 +507,10 @@ describe("Grammar", () => { Token.Identifiers.MethodName("!="), Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Parameter("x"), + Token.Identifiers.ParameterName("x"), Token.Punctuation.Comma, Token.PrimitiveType.Int, - Token.Variables.Parameter("y"), + Token.Identifiers.ParameterName("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, Token.Keywords.Control.Return, @@ -534,10 +534,10 @@ describe("Grammar", () => { Token.Identifiers.MethodName(">"), Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Parameter("x"), + Token.Identifiers.ParameterName("x"), Token.Punctuation.Comma, Token.PrimitiveType.Int, - Token.Variables.Parameter("y"), + Token.Identifiers.ParameterName("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, Token.Keywords.Control.Return, @@ -561,10 +561,10 @@ describe("Grammar", () => { Token.Identifiers.MethodName("<"), Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Parameter("x"), + Token.Identifiers.ParameterName("x"), Token.Punctuation.Comma, Token.PrimitiveType.Int, - Token.Variables.Parameter("y"), + Token.Identifiers.ParameterName("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, Token.Keywords.Control.Return, @@ -588,10 +588,10 @@ describe("Grammar", () => { Token.Identifiers.MethodName(">="), Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Parameter("x"), + Token.Identifiers.ParameterName("x"), Token.Punctuation.Comma, Token.PrimitiveType.Int, - Token.Variables.Parameter("y"), + Token.Identifiers.ParameterName("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, Token.Keywords.Control.Return, @@ -615,10 +615,10 @@ describe("Grammar", () => { Token.Identifiers.MethodName("<="), Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - 
Token.Variables.Parameter("x"), + Token.Identifiers.ParameterName("x"), Token.Punctuation.Comma, Token.PrimitiveType.Int, - Token.Variables.Parameter("y"), + Token.Identifiers.ParameterName("y"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, Token.Keywords.Control.Return, @@ -642,7 +642,7 @@ describe("Grammar", () => { Token.PrimitiveType.Bool, Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Parameter("x"), + Token.Identifiers.ParameterName("x"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, Token.Keywords.Control.Return, @@ -666,7 +666,7 @@ describe("Grammar", () => { Token.PrimitiveType.Bool, Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Parameter("x"), + Token.Identifiers.ParameterName("x"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, Token.Keywords.Control.Return, @@ -690,7 +690,7 @@ describe("Grammar", () => { Token.Identifiers.MethodName("+"), Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Parameter("value"), + Token.Identifiers.ParameterName("value"), Token.Punctuation.CloseParen, Token.Operators.Arrow, Token.Operators.Arithmetic.Addition, diff --git a/test/syntaxes/preprocessor.test.syntax.ts b/test/syntaxes/preprocessor.test.syntax.ts index 76121c71a3..79466f787d 100644 --- a/test/syntaxes/preprocessor.test.syntax.ts +++ b/test/syntaxes/preprocessor.test.syntax.ts @@ -554,13 +554,13 @@ public enum E Token.Keywords.Enum, Token.Identifiers.EnumName("E"), Token.Punctuation.OpenBrace, - Token.Variables.EnumMember("A"), + Token.Identifiers.EnumMemberName("A"), Token.Punctuation.Comma, - Token.Variables.EnumMember("B"), + Token.Identifiers.EnumMemberName("B"), Token.Operators.Assignment, Token.Variables.ReadWrite("A"), Token.Punctuation.Comma, - Token.Variables.EnumMember("C"), + Token.Identifiers.EnumMemberName("C"), Token.Operators.Assignment, Token.Literals.Numeric.Decimal("2"), Token.Operators.Arithmetic.Addition, @@ -570,7 +570,7 @@ public 
enum E Token.Punctuation.Hash, Token.Keywords.Preprocessor.If, Token.Identifiers.PreprocessorSymbol("DEBUG"), - Token.Variables.EnumMember("D"), + Token.Identifiers.EnumMemberName("D"), Token.Punctuation.Comma, Token.Punctuation.Hash, Token.Keywords.Preprocessor.EndIf, diff --git a/test/syntaxes/statements.test.syntax.ts b/test/syntaxes/statements.test.syntax.ts index 7466d8b794..5323b39108 100644 --- a/test/syntaxes/statements.test.syntax.ts +++ b/test/syntaxes/statements.test.syntax.ts @@ -67,7 +67,7 @@ unchecked Token.Keywords.Control.For, Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Local("i"), + Token.Identifiers.LocalName("i"), Token.Operators.Assignment, Token.Literals.Numeric.Decimal("0"), Token.Punctuation.Semicolon, @@ -95,7 +95,7 @@ for (int i = 0; i < 42; i++) Token.Keywords.Control.For, Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Local("i"), + Token.Identifiers.LocalName("i"), Token.Operators.Assignment, Token.Literals.Numeric.Decimal("0"), Token.Punctuation.Semicolon, @@ -125,7 +125,7 @@ for (int i = 0; i < 42; i++) Token.Keywords.Control.For, Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Local("i"), + Token.Identifiers.LocalName("i"), Token.Operators.Assignment, Token.Literals.Numeric.Decimal("0"), Token.Punctuation.Semicolon, @@ -153,7 +153,7 @@ for (int i = 0; i < 42; i++) Token.Keywords.Control.ForEach, Token.Punctuation.OpenParen, Token.PrimitiveType.Int, - Token.Variables.Local("i"), + Token.Identifiers.LocalName("i"), Token.Keywords.Control.In, Token.Variables.ReadWrite("numbers"), Token.Punctuation.CloseParen, @@ -174,7 +174,7 @@ foreach (var s in myList) Token.Keywords.Control.ForEach, Token.Punctuation.OpenParen, Token.Keywords.Var, - Token.Variables.Local("s"), + Token.Identifiers.LocalName("s"), Token.Keywords.Control.In, Token.Variables.ReadWrite("myList"), Token.Punctuation.CloseParen, @@ -737,7 +737,7 @@ catch (Exception ex) Token.Keywords.Control.Catch, 
Token.Punctuation.OpenParen, Token.Type("Exception"), - Token.Variables.Local("ex"), + Token.Identifiers.LocalName("ex"), Token.Punctuation.CloseParen, Token.Punctuation.OpenBrace, Token.Punctuation.CloseBrace @@ -821,7 +821,7 @@ int x;`); Token.Punctuation.OpenBrace, Token.Punctuation.CloseBrace, Token.PrimitiveType.Int, - Token.Variables.Local("x"), + Token.Identifiers.LocalName("x"), Token.Punctuation.Semicolon ]); }); @@ -924,7 +924,7 @@ using (var o = new object()) Token.Keywords.Using, Token.Punctuation.OpenParen, Token.Keywords.Var, - Token.Variables.Local("o"), + Token.Identifiers.LocalName("o"), Token.Operators.Assignment, Token.Keywords.New, Token.PrimitiveType.Object, @@ -950,7 +950,7 @@ using (var o = new object()) Token.Keywords.Using, Token.Punctuation.OpenParen, Token.Keywords.Var, - Token.Variables.Local("o"), + Token.Identifiers.LocalName("o"), Token.Operators.Assignment, Token.Keywords.New, Token.PrimitiveType.Object, diff --git a/test/syntaxes/utils/tokenize.ts b/test/syntaxes/utils/tokenize.ts index dda3539896..02f57638c5 100644 --- a/test/syntaxes/utils/tokenize.ts +++ b/test/syntaxes/utils/tokenize.ts @@ -177,17 +177,21 @@ export namespace Token { export const AliasName = (text: string) => createToken(text, 'entity.name.type.alias.cs'); export const ClassName = (text: string) => createToken(text, 'entity.name.type.class.cs'); export const DelegateName = (text: string) => createToken(text, 'entity.name.type.delegate.cs'); + export const EnumMemberName = (text: string) => createToken(text, 'entity.name.variable.enum-member.cs'); export const EnumName = (text: string) => createToken(text, 'entity.name.type.enum.cs'); export const EventName = (text: string) => createToken(text, 'entity.name.variable.event.cs'); export const FieldName = (text: string) => createToken(text, 'entity.name.variable.field.cs'); export const InterfaceName = (text: string) => createToken(text, 'entity.name.type.interface.cs'); export const LabelName = (text: string) => 
createToken(text, 'entity.name.label.cs'); + export const LocalName = (text: string) => createToken(text, 'entity.name.variable.local.cs'); export const MethodName = (text: string) => createToken(text, 'entity.name.function.cs'); export const NamespaceName = (text: string) => createToken(text, 'entity.name.type.namespace.cs'); + export const ParameterName = (text: string) => createToken(text, 'entity.name.variable.parameter.cs'); export const PreprocessorSymbol = (text: string) => createToken(text, 'entity.name.variable.preprocessor.symbol.cs'); export const PropertyName = (text: string) => createToken(text, 'entity.name.variable.property.cs'); export const RangeVariableName = (text: string) => createToken(text, 'entity.name.variable.range-variable.cs'); export const StructName = (text: string) => createToken(text, 'entity.name.type.struct.cs'); + export const TupleElementName = (text: string) => createToken(text, 'entity.name.variable.tuple-element.cs'); export const TypeParameterName = (text: string) => createToken(text, 'entity.name.type.type-parameter.cs'); } @@ -431,13 +435,9 @@ export namespace Token { export namespace Variables { export const Alias = (text: string) => createToken(text, 'variable.other.alias.cs'); - export const EnumMember = (text: string) => createToken(text, 'variable.other.enummember.cs'); - export const Local = (text: string) => createToken(text, 'variable.local.cs'); export const Object = (text: string) => createToken(text, 'variable.other.object.cs'); export const Property = (text: string) => createToken(text, 'variable.other.object.property.cs'); - export const Parameter = (text: string) => createToken(text, 'variable.parameter.cs'); export const ReadWrite = (text: string) => createToken(text, 'variable.other.readwrite.cs'); - export const Tuple = (text: string) => createToken(text, 'entity.name.variable.tuple.cs'); } export const IllegalNewLine = (text: string) => createToken(text, 'invalid.illegal.newline.cs'); From 
ae019daee265540cf2322d5f5e75f9127a18ac7f Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Mon, 9 Jan 2017 16:18:08 -0800 Subject: [PATCH 120/192] Add support for anonymous methods --- syntaxes/csharp.tmLanguage.yml | 13 +- test/syntaxes/expressions.test.syntax.ts | 315 ++++++++++++++++++++--- 2 files changed, 285 insertions(+), 43 deletions(-) diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index 594d8106f9..f3df50a82d 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -8,7 +8,6 @@ uuid: f7de61e2-bdde-4e2a-a139-8221b179584e # TODO List # # * Refinement and tests to ensure proper highlighting while typing -# * anonymous methods # * is and as cast expressions # * null coalescing operator # * null propagating operator @@ -1960,6 +1959,18 @@ repository: patterns: - include: '#block' - include: '#expression' + - begin: |- + (?x) + (?:\b(async)\b\s*)? + (?:\b(delegate)\b\s*) + beginCaptures: + '1': { name: storage.modifier.cs } + '2': { name: keyword.other.delegate.cs } + end: (?=\)|;) + patterns: + - include: '#parenthesized-parameter-list' + - include: '#block' + - include: '#expression' type: name: meta.type.cs diff --git a/test/syntaxes/expressions.test.syntax.ts b/test/syntaxes/expressions.test.syntax.ts index 2abf47a3f8..4d88425480 100644 --- a/test/syntaxes/expressions.test.syntax.ts +++ b/test/syntaxes/expressions.test.syntax.ts @@ -28,19 +28,39 @@ describe("Grammar", () => { ]); }); - it("async lambda expression with no parameters (assignment)", () => { - const input = Input.InMethod(`Func a = async () => { };`); + it("lambda expression with single parameter (assignment)", () => { + const input = Input.InMethod(`Action a = x => { };`); const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("Func"), + Token.Type("Action"), Token.Punctuation.TypeParameters.Begin, - Token.Type("Task"), + Token.PrimitiveType.Int, + Token.Punctuation.TypeParameters.End, + 
Token.Identifiers.LocalName("a"), + Token.Operators.Assignment, + Token.Identifiers.ParameterName("x"), + Token.Operators.Arrow, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.Semicolon + ]); + }); + + it("lambda expression with single typed parameter (assignment)", () => { + const input = Input.InMethod(`Action a = (int x) => { };`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("Action"), + Token.Punctuation.TypeParameters.Begin, + Token.PrimitiveType.Int, Token.Punctuation.TypeParameters.End, Token.Identifiers.LocalName("a"), Token.Operators.Assignment, - Token.Keywords.Modifiers.Async, Token.Punctuation.OpenParen, + Token.PrimitiveType.Int, + Token.Identifiers.ParameterName("x"), Token.Punctuation.CloseParen, Token.Operators.Arrow, Token.Punctuation.OpenBrace, @@ -49,18 +69,26 @@ describe("Grammar", () => { ]); }); - it("lambda expression with single parameter (assignment)", () => { - const input = Input.InMethod(`Action a = x => { };`); + it("lambda expression with multiple typed parameters (assignment)", () => { + const input = Input.InMethod(`Action a = (int x, int y) => { };`); const tokens = tokenize(input); tokens.should.deep.equal([ Token.Type("Action"), Token.Punctuation.TypeParameters.Begin, Token.PrimitiveType.Int, + Token.Punctuation.Comma, + Token.PrimitiveType.Int, Token.Punctuation.TypeParameters.End, Token.Identifiers.LocalName("a"), Token.Operators.Assignment, + Token.Punctuation.OpenParen, + Token.PrimitiveType.Int, Token.Identifiers.ParameterName("x"), + Token.Punctuation.Comma, + Token.PrimitiveType.Int, + Token.Identifiers.ParameterName("y"), + Token.Punctuation.CloseParen, Token.Operators.Arrow, Token.Punctuation.OpenBrace, Token.Punctuation.CloseBrace, @@ -68,21 +96,20 @@ describe("Grammar", () => { ]); }); - it("async lambda expression with single parameter (assignment)", () => { - const input = Input.InMethod(`Func a = async x => { };`); + it("async lambda expression 
with no parameters (assignment)", () => { + const input = Input.InMethod(`Func a = async () => { };`); const tokens = tokenize(input); tokens.should.deep.equal([ Token.Type("Func"), Token.Punctuation.TypeParameters.Begin, - Token.PrimitiveType.Int, - Token.Punctuation.Comma, Token.Type("Task"), Token.Punctuation.TypeParameters.End, Token.Identifiers.LocalName("a"), Token.Operators.Assignment, Token.Keywords.Modifiers.Async, - Token.Identifiers.ParameterName("x"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, Token.Operators.Arrow, Token.Punctuation.OpenBrace, Token.Punctuation.CloseBrace, @@ -90,21 +117,21 @@ describe("Grammar", () => { ]); }); - it("lambda expression with single typed parameter (assignment)", () => { - const input = Input.InMethod(`Action a = (int x) => { };`); + it("async lambda expression with single parameter (assignment)", () => { + const input = Input.InMethod(`Func a = async x => { };`); const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("Action"), + Token.Type("Func"), Token.Punctuation.TypeParameters.Begin, Token.PrimitiveType.Int, + Token.Punctuation.Comma, + Token.Type("Task"), Token.Punctuation.TypeParameters.End, Token.Identifiers.LocalName("a"), Token.Operators.Assignment, - Token.Punctuation.OpenParen, - Token.PrimitiveType.Int, + Token.Keywords.Modifiers.Async, Token.Identifiers.ParameterName("x"), - Token.Punctuation.CloseParen, Token.Operators.Arrow, Token.Punctuation.OpenBrace, Token.Punctuation.CloseBrace, @@ -137,19 +164,22 @@ describe("Grammar", () => { ]); }); - it("lambda expression with multiple typed parameters (assignment)", () => { - const input = Input.InMethod(`Action a = (int x, int y) => { };`); + it("async lambda expression with multiple typed parameters (assignment)", () => { + const input = Input.InMethod(`Func a = async (int x, int y) => { };`); const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("Action"), + Token.Type("Func"), 
Token.Punctuation.TypeParameters.Begin, Token.PrimitiveType.Int, Token.Punctuation.Comma, Token.PrimitiveType.Int, + Token.Punctuation.Comma, + Token.Type("Task"), Token.Punctuation.TypeParameters.End, Token.Identifiers.LocalName("a"), Token.Operators.Assignment, + Token.Keywords.Modifiers.Async, Token.Punctuation.OpenParen, Token.PrimitiveType.Int, Token.Identifiers.ParameterName("x"), @@ -164,22 +194,113 @@ describe("Grammar", () => { ]); }); - it("async lambda expression with multiple typed parameters (assignment)", () => { - const input = Input.InMethod(`Func a = async (int x, int y) => { };`); + it("anonymous method with no parameter list (assignment)", () => { + const input = Input.InMethod(`Action a = delegate { };`); const tokens = tokenize(input); tokens.should.deep.equal([ - Token.Type("Func"), - Token.Punctuation.TypeParameters.Begin, + Token.Type("Action"), + Token.Identifiers.LocalName("a"), + Token.Operators.Assignment, + Token.Keywords.Delegate, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.Semicolon + ]); + }); + + it("anonymous method with empty parameter list (assignment)", () => { + const input = Input.InMethod(`Action a = delegate() { };`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("Action"), + Token.Identifiers.LocalName("a"), + Token.Operators.Assignment, + Token.Keywords.Delegate, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.Semicolon + ]); + }); + + it("anonymous method with parameters (assignment)", () => { + const input = Input.InMethod(`Action a = delegate(int x, int y) { };`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("Action"), + Token.Identifiers.LocalName("a"), + Token.Operators.Assignment, + Token.Keywords.Delegate, + Token.Punctuation.OpenParen, Token.PrimitiveType.Int, + Token.Identifiers.ParameterName("x"), 
Token.Punctuation.Comma, Token.PrimitiveType.Int, - Token.Punctuation.Comma, + Token.Identifiers.ParameterName("y"), + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.Semicolon + ]); + }); + + it("async anonymous method with no parameter list (assignment)", () => { + const input = Input.InMethod(`Func a = async delegate { };`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("Func"), + Token.Punctuation.TypeParameters.Begin, Token.Type("Task"), Token.Punctuation.TypeParameters.End, Token.Identifiers.LocalName("a"), Token.Operators.Assignment, Token.Keywords.Modifiers.Async, + Token.Keywords.Delegate, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.Semicolon + ]); + }); + + it("async anonymous method with empty parameter list (assignment)", () => { + const input = Input.InMethod(`Func a = async delegate() { };`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("Func"), + Token.Punctuation.TypeParameters.Begin, + Token.Type("Task"), + Token.Punctuation.TypeParameters.End, + Token.Identifiers.LocalName("a"), + Token.Operators.Assignment, + Token.Keywords.Modifiers.Async, + Token.Keywords.Delegate, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.Semicolon + ]); + }); + + it("async anonymous method with parameters (assignment)", () => { + const input = Input.InMethod(`Func a = async delegate(int x, int y) { };`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("Func"), + Token.Punctuation.TypeParameters.Begin, + Token.Type("Task"), + Token.Punctuation.TypeParameters.End, + Token.Identifiers.LocalName("a"), + Token.Operators.Assignment, + Token.Keywords.Modifiers.Async, + Token.Keywords.Delegate, Token.Punctuation.OpenParen, Token.PrimitiveType.Int, Token.Identifiers.ParameterName("x"), 
@@ -187,7 +308,6 @@ describe("Grammar", () => { Token.PrimitiveType.Int, Token.Identifiers.ParameterName("y"), Token.Punctuation.CloseParen, - Token.Operators.Arrow, Token.Punctuation.OpenBrace, Token.Punctuation.CloseBrace, Token.Punctuation.Semicolon @@ -211,15 +331,32 @@ describe("Grammar", () => { ]); }); - it("async lambda expression with no parameters (passed as argument)", () => { - const input = Input.InMethod(`M(async () => { });`); + it("lambda expression with single parameter (passed as argument)", () => { + const input = Input.InMethod(`M(x => { });`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("M"), + Token.Punctuation.OpenParen, + Token.Identifiers.ParameterName("x"), + Token.Operators.Arrow, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("lambda expression with single typed parameter (passed as argument)", () => { + const input = Input.InMethod(`M((int x) => { });`); const tokens = tokenize(input); tokens.should.deep.equal([ Token.Identifiers.MethodName("M"), Token.Punctuation.OpenParen, - Token.Keywords.Modifiers.Async, Token.Punctuation.OpenParen, + Token.PrimitiveType.Int, + Token.Identifiers.ParameterName("x"), Token.Punctuation.CloseParen, Token.Operators.Arrow, Token.Punctuation.OpenBrace, @@ -229,14 +366,38 @@ describe("Grammar", () => { ]); }); - it("lambda expression with single parameter (passed as argument)", () => { - const input = Input.InMethod(`M(x => { });`); + it("lambda expression with multiple typed parameters (passed as argument)", () => { + const input = Input.InMethod(`M((int x, int y) => { });`); const tokens = tokenize(input); tokens.should.deep.equal([ Token.Identifiers.MethodName("M"), Token.Punctuation.OpenParen, + Token.Punctuation.OpenParen, + Token.PrimitiveType.Int, Token.Identifiers.ParameterName("x"), + Token.Punctuation.Comma, + Token.PrimitiveType.Int, + 
Token.Identifiers.ParameterName("y"), + Token.Punctuation.CloseParen, + Token.Operators.Arrow, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("async lambda expression with no parameters (passed as argument)", () => { + const input = Input.InMethod(`M(async () => { });`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("M"), + Token.Punctuation.OpenParen, + Token.Keywords.Modifiers.Async, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, Token.Operators.Arrow, Token.Punctuation.OpenBrace, Token.Punctuation.CloseBrace, @@ -262,13 +423,14 @@ describe("Grammar", () => { ]); }); - it("lambda expression with single typed parameter (passed as argument)", () => { - const input = Input.InMethod(`M((int x) => { });`); + it("async lambda expression with single typed parameter (passed as argument)", () => { + const input = Input.InMethod(`M(async (int x) => { });`); const tokens = tokenize(input); tokens.should.deep.equal([ Token.Identifiers.MethodName("M"), Token.Punctuation.OpenParen, + Token.Keywords.Modifiers.Async, Token.Punctuation.OpenParen, Token.PrimitiveType.Int, Token.Identifiers.ParameterName("x"), @@ -281,8 +443,8 @@ describe("Grammar", () => { ]); }); - it("async lambda expression with single typed parameter (passed as argument)", () => { - const input = Input.InMethod(`M(async (int x) => { });`); + it("async lambda expression with multiple typed parameters (passed as argument)", () => { + const input = Input.InMethod(`M(async (int x, int y) => { });`); const tokens = tokenize(input); tokens.should.deep.equal([ @@ -292,6 +454,9 @@ describe("Grammar", () => { Token.Punctuation.OpenParen, Token.PrimitiveType.Int, Token.Identifiers.ParameterName("x"), + Token.Punctuation.Comma, + Token.PrimitiveType.Int, + Token.Identifiers.ParameterName("y"), Token.Punctuation.CloseParen, Token.Operators.Arrow, 
Token.Punctuation.OpenBrace, @@ -301,13 +466,46 @@ describe("Grammar", () => { ]); }); - it("lambda expression with multiple typed parameters (passed as argument)", () => { - const input = Input.InMethod(`M((int x, int y) => { });`); + it("anonymous method with no parameter list (passed as argument)", () => { + const input = Input.InMethod(`M(delegate { });`); const tokens = tokenize(input); tokens.should.deep.equal([ Token.Identifiers.MethodName("M"), Token.Punctuation.OpenParen, + Token.Keywords.Delegate, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("anonymous method with empty parameter list (passed as argument)", () => { + const input = Input.InMethod(`M(delegate() { });`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("M"), + Token.Punctuation.OpenParen, + Token.Keywords.Delegate, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("anonymous method with parameters (passed as argument)", () => { + const input = Input.InMethod(`M(delegate(int x, int y) { });`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("M"), + Token.Punctuation.OpenParen, + Token.Keywords.Delegate, Token.Punctuation.OpenParen, Token.PrimitiveType.Int, Token.Identifiers.ParameterName("x"), @@ -315,7 +513,6 @@ describe("Grammar", () => { Token.PrimitiveType.Int, Token.Identifiers.ParameterName("y"), Token.Punctuation.CloseParen, - Token.Operators.Arrow, Token.Punctuation.OpenBrace, Token.Punctuation.CloseBrace, Token.Punctuation.CloseParen, @@ -323,14 +520,49 @@ describe("Grammar", () => { ]); }); - it("async lambda expression with multiple typed parameters (passed as argument)", () => { - const input = Input.InMethod(`M(async (int x, int y) => 
{ });`); + it("async anonymous method with no parameter list (passed as argument)", () => { + const input = Input.InMethod(`M(async delegate { });`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("M"), + Token.Punctuation.OpenParen, + Token.Keywords.Modifiers.Async, + Token.Keywords.Delegate, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("async anonymous method with empty parameter list (passed as argument)", () => { + const input = Input.InMethod(`M(async delegate() { });`); const tokens = tokenize(input); tokens.should.deep.equal([ Token.Identifiers.MethodName("M"), Token.Punctuation.OpenParen, Token.Keywords.Modifiers.Async, + Token.Keywords.Delegate, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("async anonymous method with parameters (passed as argument)", () => { + const input = Input.InMethod(`M(async delegate(int x, int y) { });`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("M"), + Token.Punctuation.OpenParen, + Token.Keywords.Modifiers.Async, + Token.Keywords.Delegate, Token.Punctuation.OpenParen, Token.PrimitiveType.Int, Token.Identifiers.ParameterName("x"), @@ -338,7 +570,6 @@ describe("Grammar", () => { Token.PrimitiveType.Int, Token.Identifiers.ParameterName("y"), Token.Punctuation.CloseParen, - Token.Operators.Arrow, Token.Punctuation.OpenBrace, Token.Punctuation.CloseBrace, Token.Punctuation.CloseParen, From 78dae2267e84b6aef7546904c1b5081aa7d123b0 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Tue, 10 Jan 2017 08:20:50 -0800 Subject: [PATCH 121/192] Add support for null coalescing operator --- syntaxes/csharp.tmLanguage.yml | 3 +- test/syntaxes/expressions.test.syntax.ts | 38 
++++++++++++++++++++++++ test/syntaxes/utils/tokenize.ts | 13 ++++---- 3 files changed, 46 insertions(+), 8 deletions(-) diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index f3df50a82d..55b8f2c8b0 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -9,7 +9,6 @@ uuid: f7de61e2-bdde-4e2a-a139-8221b179584e # # * Refinement and tests to ensure proper highlighting while typing # * is and as cast expressions -# * null coalescing operator # * null propagating operator # * conditional operator # * compound assignement @@ -1438,6 +1437,8 @@ repository: match: \+\+ - name: keyword.operator.arithmetic.cs match: '%|\*|/|-|\+' + - name: keyword.operator.null-coalescing.cs + match: \?\? assignment-expression: match: \.*\s*(=)\s*\.*(?=;) diff --git a/test/syntaxes/expressions.test.syntax.ts b/test/syntaxes/expressions.test.syntax.ts index 4d88425480..34802bc1da 100644 --- a/test/syntaxes/expressions.test.syntax.ts +++ b/test/syntaxes/expressions.test.syntax.ts @@ -1256,6 +1256,44 @@ a1[1] = ((this.a)); a1[2] = (c); a1[1] = (i); }); }); + describe("Null-coalescing Operator", () => { + it("in assignment", () => { + const input = Input.InMethod(`var y = x ?? new object();`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Identifiers.LocalName("y"), + Token.Operators.Assignment, + Token.Variables.ReadWrite("x"), + Token.Operators.NullCoalescing, + Token.Keywords.New, + Token.PrimitiveType.Object, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("passed as argument", () => { + const input = Input.InMethod(`M(x ?? 
new object());`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("M"), + Token.Punctuation.OpenParen, + Token.Variables.ReadWrite("x"), + Token.Operators.NullCoalescing, + Token.Keywords.New, + Token.PrimitiveType.Object, + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + }); + describe("Primary", () => { it("default", () => { const input = Input.InMethod(`var t = default(List<>);`); diff --git a/test/syntaxes/utils/tokenize.ts b/test/syntaxes/utils/tokenize.ts index 02f57638c5..25a5fd16de 100644 --- a/test/syntaxes/utils/tokenize.ts +++ b/test/syntaxes/utils/tokenize.ts @@ -329,8 +329,6 @@ export namespace Token { } export namespace Operators { - export const Arrow = createToken('=>', 'keyword.operator.arrow.cs'); - export namespace Arithmetic { export const Addition = createToken('+', 'keyword.operator.arithmetic.cs'); export const Division = createToken('/', 'keyword.operator.arithmetic.cs'); @@ -339,8 +337,6 @@ export namespace Token { export const Subtraction = createToken('-', 'keyword.operator.arithmetic.cs'); } - export const Assignment = createToken('=', 'keyword.operator.assignment.cs'); - export namespace Bitwise { export const And = createToken('&', 'keyword.operator.bitwise.cs'); export const BitwiseComplement = createToken('~', 'keyword.operator.bitwise.cs'); @@ -350,9 +346,6 @@ export namespace Token { export const ShiftRight = createToken('>>', 'keyword.operator.bitwise.shift.cs'); } - export const Decrement = createToken('--', 'keyword.operator.decrement.cs'); - export const Increment = createToken('++', 'keyword.operator.increment.cs'); - export namespace Logical { export const And = createToken('&&', 'keyword.operator.logical.cs'); export const Not = createToken('!', 'keyword.operator.logical.cs'); @@ -368,6 +361,12 @@ export namespace Token { export const GreaterThan = createToken('>', 
'keyword.operator.relational.cs'); export const GreaterThanOrEqual = createToken('>=', 'keyword.operator.relational.cs'); } + + export const Arrow = createToken('=>', 'keyword.operator.arrow.cs'); + export const Assignment = createToken('=', 'keyword.operator.assignment.cs'); + export const Decrement = createToken('--', 'keyword.operator.decrement.cs'); + export const Increment = createToken('++', 'keyword.operator.increment.cs'); + export const NullCoalescing = createToken('??', 'keyword.operator.null-coalescing.cs'); } export namespace PrimitiveType { From a125c41560fb15c13f06face8124ab3bd353a9df Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Tue, 10 Jan 2017 08:39:17 -0800 Subject: [PATCH 122/192] Add support for conditional operator --- syntaxes/csharp.tmLanguage.yml | 13 +++++---- test/syntaxes/expressions.test.syntax.ts | 36 ++++++++++++++++++++++++ test/syntaxes/utils/tokenize.ts | 5 ++++ 3 files changed, 49 insertions(+), 5 deletions(-) diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index 55b8f2c8b0..edb2629bf7 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -10,7 +10,6 @@ uuid: f7de61e2-bdde-4e2a-a139-8221b179584e # * Refinement and tests to ensure proper highlighting while typing # * is and as cast expressions # * null propagating operator -# * conditional operator # * compound assignement # * verbatim identifiers # * hexadecimal and unicode character escape sequences @@ -135,6 +134,7 @@ repository: - include: '#verbatim-interpolated-string' - include: '#literal' - include: '#this-or-base-expression' + - include: '#conditional-operator' - include: '#expression-operators' - include: '#query-expression' - include: '#anonymous-method-expression' @@ -1440,10 +1440,13 @@ repository: - name: keyword.operator.null-coalescing.cs match: \?\? - assignment-expression: - match: \.*\s*(=)\s*\.*(?=;) - captures: - '1': keyword.operator.assignment.cs + conditional-operator: + begin: (? 
{ + it("in assignment", () => { + const input = Input.InMethod(`var y = x ? 19 : 23;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Identifiers.LocalName("y"), + Token.Operators.Assignment, + Token.Variables.ReadWrite("x"), + Token.Operators.Conditional.QuestionMark, + Token.Literals.Numeric.Decimal("19"), + Token.Operators.Conditional.Colon, + Token.Literals.Numeric.Decimal("23"), + Token.Punctuation.Semicolon + ]); + }); + + it("passed as argument", () => { + const input = Input.InMethod(`M(x ? 19 : 23);`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("M"), + Token.Punctuation.OpenParen, + Token.Variables.ReadWrite("x"), + Token.Operators.Conditional.QuestionMark, + Token.Literals.Numeric.Decimal("19"), + Token.Operators.Conditional.Colon, + Token.Literals.Numeric.Decimal("23"), + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + }); + describe("Element Access", () => { it("no arguments", () => { const input = Input.InMethod(`var o = P[];`); diff --git a/test/syntaxes/utils/tokenize.ts b/test/syntaxes/utils/tokenize.ts index 25a5fd16de..bb24d36d65 100644 --- a/test/syntaxes/utils/tokenize.ts +++ b/test/syntaxes/utils/tokenize.ts @@ -346,6 +346,11 @@ export namespace Token { export const ShiftRight = createToken('>>', 'keyword.operator.bitwise.shift.cs'); } + export namespace Conditional { + export const QuestionMark = createToken('?', 'keyword.operator.conditional.question-mark.cs'); + export const Colon = createToken(':', 'keyword.operator.conditional.colon.cs'); + } + export namespace Logical { export const And = createToken('&&', 'keyword.operator.logical.cs'); export const Not = createToken('!', 'keyword.operator.logical.cs'); From 9fdc7c99aeec3603648e49b6bc423f271e3e09d1 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Tue, 10 Jan 2017 08:59:25 -0800 Subject: [PATCH 123/192] Allow attributes on property and event accessors --- 
syntaxes/csharp.tmLanguage.yml | 2 + test/syntaxes/events.test.syntax.ts | 53 +++++++++++++++++++++---- test/syntaxes/properties.test.syntax.ts | 48 ++++++++++++++++------ 3 files changed, 83 insertions(+), 20 deletions(-) diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index edb2629bf7..8274dd66d6 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -644,6 +644,7 @@ repository: match: \b(get)\b - name: keyword.other.set.cs match: \b(set)\b + - include: '#attribute-section' - include: '#block' - include: '#punctuation-semicolon' @@ -659,6 +660,7 @@ repository: match: \b(add)\b - name: keyword.other.remove.cs match: \b(remove)\b + - include: '#attribute-section' - include: '#block' - include: '#punctuation-semicolon' diff --git a/test/syntaxes/events.test.syntax.ts b/test/syntaxes/events.test.syntax.ts index 95b9f9a838..c82828ba6f 100644 --- a/test/syntaxes/events.test.syntax.ts +++ b/test/syntaxes/events.test.syntax.ts @@ -11,7 +11,6 @@ describe("Grammar", () => { describe("Events", () => { it("declaration", () => { - const input = Input.InClass(`public event Type Event;`); const tokens = tokenize(input); @@ -24,7 +23,6 @@ describe("Grammar", () => { }); it("declaration with multiple modifiers", () => { - const input = Input.InClass(`protected internal event Type Event;`); const tokens = tokenize(input); @@ -38,7 +36,6 @@ describe("Grammar", () => { }); it("declaration with multiple declarators", () => { - const input = Input.InClass(`public event Type Event1, Event2;`); const tokens = tokenize(input); @@ -53,7 +50,6 @@ describe("Grammar", () => { }); it("generic", () => { - const input = Input.InClass(`public event EventHandler, Dictionary> Event;`); const tokens = tokenize(input); @@ -79,7 +75,6 @@ describe("Grammar", () => { }); it("declaration with accessors", () => { - const input = Input.InClass(` public event Type Event { @@ -105,7 +100,6 @@ public event Type Event }); it("explicitly-implemented 
interface member", () => { - const input = Input.InClass(`event EventHandler IFoo.Event { add; remove; }`); const tokens = tokenize(input); @@ -127,7 +121,6 @@ public event Type Event }); it("declaration in interface", () => { - const input = Input.InInterface(`event EventHandler Event;`); const tokens = tokenize(input); @@ -137,5 +130,51 @@ public event Type Event Token.Identifiers.EventName("Event"), Token.Punctuation.Semicolon]); }); + + it("declaration with attributes", () => { + const input = Input.InClass(` +[event: Test] +public event Action E1 +{ + [Obsolete] + add { } + [Obsolete] + [return: Obsolete] + remove { } +}`); + + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Punctuation.OpenBracket, + Token.Keywords.AttributeSpecifier("event"), + Token.Punctuation.Colon, + Token.Type("Test"), + Token.Punctuation.CloseBracket, + Token.Keywords.Modifiers.Public, + Token.Keywords.Event, + Token.Type("Action"), + Token.Identifiers.EventName("E1"), + Token.Punctuation.OpenBrace, + Token.Punctuation.OpenBracket, + Token.Type("Obsolete"), + Token.Punctuation.CloseBracket, + Token.Keywords.Add, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.OpenBracket, + Token.Type("Obsolete"), + Token.Punctuation.CloseBracket, + Token.Punctuation.OpenBracket, + Token.Keywords.AttributeSpecifier("return"), + Token.Punctuation.Colon, + Token.Type("Obsolete"), + Token.Punctuation.CloseBracket, + Token.Keywords.Remove, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseBrace + ]); + }); }); }); \ No newline at end of file diff --git a/test/syntaxes/properties.test.syntax.ts b/test/syntaxes/properties.test.syntax.ts index 81439ca5ea..d67f83f090 100644 --- a/test/syntaxes/properties.test.syntax.ts +++ b/test/syntaxes/properties.test.syntax.ts @@ -11,7 +11,6 @@ describe("Grammar", () => { describe("Property", () => { it("declaration", () => { - const input = Input.InClass(` public IBooom 
Property { @@ -42,7 +41,6 @@ public IBooom Property }); it("declaration single line", () => { - const input = Input.InClass(`public IBooom Property { get { return null; } private set { something = value; } }`); const tokens = tokenize(input); @@ -69,7 +67,6 @@ public IBooom Property }); it("declaration without modifiers", () => { - const input = Input.InClass(`IBooom Property {get; set;}`); const tokens = tokenize(input); @@ -85,7 +82,6 @@ public IBooom Property }); it("auto-property single line", function () { - const input = Input.InClass(`public IBooom Property { get; set; }`); const tokens = tokenize(input); @@ -102,7 +98,6 @@ public IBooom Property }); it("auto-property single line (protected internal)", function () { - const input = Input.InClass(`protected internal IBooom Property { get; set; }`); const tokens = tokenize(input); @@ -120,7 +115,6 @@ public IBooom Property }); it("auto-property", () => { - const input = Input.InClass(` public IBooom Property { @@ -142,7 +136,6 @@ public IBooom Property }); it("generic auto-property", () => { - const input = Input.InClass(`public Dictionary[]> Property { get; set; }`); const tokens = tokenize(input); @@ -169,7 +162,6 @@ public IBooom Property }); it("auto-property initializer", () => { - const input = Input.InClass(`public Dictionary[]> Property { get; } = new Dictionary[]>();`); const tokens = tokenize(input); @@ -210,7 +202,6 @@ public IBooom Property }); it("expression body", () => { - const input = Input.InClass(` private string prop1 => "hello"; private bool prop2 => true;`); @@ -235,7 +226,6 @@ private bool prop2 => true;`); }); it("explicitly-implemented interface member", () => { - const input = Input.InClass(`string IFoo.Bar { get; set; }`); const tokens = tokenize(input); @@ -256,7 +246,6 @@ private bool prop2 => true;`); }); it("declaration in interface", () => { - const input = Input.InInterface(`string Bar { get; set; }`); const tokens = tokenize(input); @@ -272,7 +261,6 @@ private bool prop2 => 
true;`); }); it("declaration in interface (read-only)", () => { - const input = Input.InInterface(`string Bar { get; }`); const tokens = tokenize(input); @@ -286,7 +274,6 @@ private bool prop2 => true;`); }); it("declaration in interface (write-only)", () => { - const input = Input.InInterface(`string Bar { set; }`); const tokens = tokenize(input); @@ -298,5 +285,40 @@ private bool prop2 => true;`); Token.Punctuation.Semicolon, Token.Punctuation.CloseBrace]); }); + + it("declaration with attributes", () => { + const input = Input.InClass(` +[Obsolete] +public int P1 +{ + [Obsolete] + get { } + [Obsolete] + set { } +}`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Punctuation.OpenBracket, + Token.Type("Obsolete"), + Token.Punctuation.CloseBracket, + Token.Keywords.Modifiers.Public, + Token.PrimitiveType.Int, + Token.Identifiers.PropertyName("P1"), + Token.Punctuation.OpenBrace, + Token.Punctuation.OpenBracket, + Token.Type("Obsolete"), + Token.Punctuation.CloseBracket, + Token.Keywords.Get, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.OpenBracket, + Token.Type("Obsolete"), + Token.Punctuation.CloseBracket, + Token.Keywords.Set, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseBrace]); + }); }); }); \ No newline at end of file From 446395f8db570a4162bbca3690a472cdb9741397 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Tue, 10 Jan 2017 09:05:31 -0800 Subject: [PATCH 124/192] Choose different keyword name for 'operator' since 'keyword.other.operator.cs' has a pre-defined meaning --- syntaxes/csharp.tmLanguage.yml | 4 ++-- test/syntaxes/utils/tokenize.ts | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index 8274dd66d6..77faefcc3b 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -780,7 +780,7 @@ repository: # '3': ? is a sub-expression. 
It's final value is not considered. # '4': ? is a sub-expression. It's final value is not considered. # '5': ? is a sub-expression. It's final value is not considered. - '6': { name: keyword.other.operator.cs } + '6': { name: keyword.other.operator-decl.cs } '7': { name: entity.name.function.cs } end: (?<=\})|(?=;) patterns: @@ -818,7 +818,7 @@ repository: - match: \b(implicit)\b captures: '1': { name: keyword.other.implicit.cs } - '2': { name: keyword.other.operator.cs } + '2': { name: keyword.other.operator-decl.cs } '3': patterns: - include: '#type' diff --git a/test/syntaxes/utils/tokenize.ts b/test/syntaxes/utils/tokenize.ts index bb24d36d65..b3d1623a20 100644 --- a/test/syntaxes/utils/tokenize.ts +++ b/test/syntaxes/utils/tokenize.ts @@ -296,7 +296,7 @@ export namespace Token { export const Lock = createToken('lock', 'keyword.other.lock.cs'); export const Namespace = createToken('namespace', 'keyword.other.namespace.cs'); export const New = createToken('new', 'keyword.other.new.cs'); - export const Operator = createToken('operator', 'keyword.other.operator.cs'); + export const Operator = createToken('operator', 'keyword.other.operator-decl.cs'); export const Remove = createToken('remove', 'keyword.other.remove.cs'); export const Set = createToken('set', 'keyword.other.set.cs'); export const Static = createToken('static', 'keyword.other.static.cs'); From ac01c0ac27246993084e3d24997c59bcaa9fb186 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Tue, 10 Jan 2017 09:33:05 -0800 Subject: [PATCH 125/192] Add support for XML doc comments by leveraging 'text.xml' --- syntaxes/csharp.tmLanguage.yml | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index 77faefcc3b..c6bb420ff2 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -14,7 +14,6 @@ uuid: f7de61e2-bdde-4e2a-a139-8221b179584e # * verbatim identifiers # * hexadecimal and unicode 
character escape sequences # * unsafe code: fixed, sizeof, unsafe blocks -# * XML doc comments patterns: - include: '#preprocessor' @@ -2228,8 +2227,15 @@ repository: '1': { name: punctuation.whitespace.comment.leading.cs } end: (?=$) patterns: + - name: comment.block.documentation.cs + begin: (? Date: Tue, 10 Jan 2017 10:01:25 -0800 Subject: [PATCH 126/192] Add regression test for issue #316 --- test/syntaxes/fields.test.syntax.ts | 36 +++++++++++++++++++---------- 1 file changed, 24 insertions(+), 12 deletions(-) diff --git a/test/syntaxes/fields.test.syntax.ts b/test/syntaxes/fields.test.syntax.ts index be58b76f15..8b7a063adf 100644 --- a/test/syntaxes/fields.test.syntax.ts +++ b/test/syntaxes/fields.test.syntax.ts @@ -11,7 +11,6 @@ describe("Grammar", () => { describe("Field", () => { it("declaration", () => { - const input = Input.InClass(` private List _field; private List field; @@ -37,7 +36,6 @@ private List field123;`); }); it("generic", () => { - const input = Input.InClass(`private Dictionary< List, Dictionary> _field;`); const tokens = tokenize(input); @@ -63,7 +61,6 @@ private List field123;`); it("modifiers", () => { - const input = Input.InClass(` private static readonly List _field; readonly string _field2; @@ -90,7 +87,6 @@ string _field3;`); }); it("types", () => { - const input = Input.InClass(` string field123; string[] field123;`); @@ -110,7 +106,6 @@ string[] field123;`); }); it("assignment", () => { - const input = Input.InClass(` private string field = "hello"; const bool field = true;`); @@ -136,7 +131,6 @@ const bool field = true;`); }); it("declaration with multiple declarators", () => { - const input = Input.InClass(`int x = 19, y = 23, z = 42;`); const tokens = tokenize(input); @@ -157,7 +151,6 @@ const bool field = true;`); }); it("tuple type with no names and no modifiers", () => { - const input = Input.InClass(`(int, int) x;`); const tokens = tokenize(input); @@ -172,7 +165,6 @@ const bool field = true;`); }); it("tuple type with no 
names and private modifier", () => { - const input = Input.InClass(`private (int, int) x;`); const tokens = tokenize(input); @@ -188,7 +180,6 @@ const bool field = true;`); }); it("tuple type with names and no modifiers", () => { - const input = Input.InClass(`(int x, int y) z;`); const tokens = tokenize(input); @@ -205,7 +196,6 @@ const bool field = true;`); }); it("tuple type with names and private modifier", () => { - const input = Input.InClass(`private (int x, int y) z;`); const tokens = tokenize(input); @@ -223,7 +213,6 @@ const bool field = true;`); }); it("Fields with fully-qualified names are highlighted properly (issue #1097)", () => { - const input = Input.InClass(` private CanvasGroup[] groups; private UnityEngine.UI.Image[] selectedImages; @@ -251,7 +240,6 @@ private UnityEngine.UI.Image[] selectedImages; }); it("Fields with dictionary initializer highlights properly (issue #1096)", () => { - const input = Input.InClass(` private readonly Dictionary languageToIndex = new Dictionary() { @@ -319,5 +307,29 @@ private readonly Dictionary languageToIndex = new Dictionary { + const input = Input.InClass(` +private readonly string initSportMessageFormatString = "line1" + + "line2";`); + + let tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Private, + Token.Keywords.Modifiers.ReadOnly, + Token.PrimitiveType.String, + Token.Identifiers.FieldName("initSportMessageFormatString"), + Token.Operators.Assignment, + Token.Punctuation.String.Begin, + Token.Literals.String("line1"), + Token.Punctuation.String.End, + Token.Operators.Arithmetic.Addition, + Token.Punctuation.String.Begin, + Token.Literals.String("line2"), + Token.Punctuation.String.End, + Token.Punctuation.Semicolon + ]); + }); }); }); From e467be77730a712d330359e37d3198c4f6c3c0a1 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Tue, 10 Jan 2017 11:56:26 -0800 Subject: [PATCH 127/192] Handle XML doc comments rather than delegating to text.xml (so we can write tests!) 
and add regression test for issue #706 --- syntaxes/csharp.tmLanguage.yml | 114 +++++++++- test/syntaxes/utils/tokenize.ts | 57 ++++- test/syntaxes/xml-doc-comments.test.syntax.ts | 211 ++++++++++++++++++ 3 files changed, 379 insertions(+), 3 deletions(-) create mode 100644 test/syntaxes/xml-doc-comments.test.syntax.ts diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index c6bb420ff2..db0989f57e 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -13,7 +13,8 @@ uuid: f7de61e2-bdde-4e2a-a139-8221b179584e # * compound assignement # * verbatim identifiers # * hexadecimal and unicode character escape sequences -# * unsafe code: fixed, sizeof, unsafe blocks +# * unsafe code: fixed, sizeof, unsafe blocks, pointer member access +# * multi-line XML doc comments patterns: - include: '#preprocessor' @@ -2233,9 +2234,118 @@ repository: '0': { name: punctuation.definition.comment.cs } end: (?=$) patterns: - - include: 'text.xml' + - include: '#xml-doc-comment' - name: comment.line.double-slash.cs begin: (?) + endCaptures: + '1': { name: punctuation.definition.tag.cs } + patterns: + - include: '#xml-attribute' + + xml-attribute: + patterns: + - match: |- + (?x) + (?:^|\s+) + ( + (?: + ([-_[:alnum:]]+) + (:) + )? 
+ ([-_[:alnum:]]+) + ) + (=) + captures: + '1': { name: entity.other.attribute-name.cs } + '2': { name: entity.other.attribute-name.namespace.cs } + '3': { name: punctuation.separator.colon.cs } + '4': { name: entity.other.attribute-name.localname.cs } + '5': { name: punctuation.separator.equals.cs } + - include: '#xml-string' + + xml-cdata: + name: string.unquoted.cdata.cs + begin: + endCaptures: + '0': { name: punctuation.definition.string.end.cs } + + xml-string: + patterns: + - name: string.quoted.single.cs + begin: \' + beginCaptures: + '0': { name: punctuation.definition.string.begin.cs } + end: \' + endCaptures: + '0': { name: punctuation.definition.string.end.cs } + patterns: + - include: '#xml-character-entity' + - name: string.quoted.double.cs + begin: \" + beginCaptures: + '0': { name: punctuation.definition.string.begin.cs } + end: \" + endCaptures: + '0': { name: punctuation.definition.string.end.cs } + patterns: + - include: '#xml-character-entity' + + xml-character-entity: + patterns: + - name: constant.character.entity.cs + match: |- + (?x) + (&) + ( + (?:[[:alpha:]:_][[:alnum:]:_.-]*)| + (?:\#[[:digit:]]+)| + (?:\#x[[:xdigit:]]+) + ) + (;) + captures: + '1': { name: punctuation.definition.constant.cs } + '3': { name: punctuation.definition.constant.cs } + - name: invalid.illegal.bad-ampersand.cs + match: '&' + + xml-comment: + name: comment.block.cs + begin: + endCaptures: + '0': { name: punctuation.definition.comment.cs } diff --git a/test/syntaxes/utils/tokenize.ts b/test/syntaxes/utils/tokenize.ts index b3d1623a20..bc4758f375 100644 --- a/test/syntaxes/utils/tokenize.ts +++ b/test/syntaxes/utils/tokenize.ts @@ -7,7 +7,7 @@ import { ITokenizeLineResult, Registry, StackElement } from 'vscode-textmate'; const registry = new Registry(); const grammar = registry.loadGrammarFromPathSync('syntaxes/csharp.tmLanguage'); -const excludedTypes = ['source.cs', 'meta.interpolation.cs', 'meta.preprocessor.cs', 'meta.type.parameters.cs'] +const excludedTypes = 
['source.cs', 'meta.interpolation.cs', 'meta.preprocessor.cs', 'meta.tag.cs', 'meta.type.parameters.cs'] export function tokenize(input: string | Input, excludeTypes: boolean = true): Token[] { if (typeof input === "string") { @@ -444,6 +444,61 @@ export namespace Token { export const ReadWrite = (text: string) => createToken(text, 'variable.other.readwrite.cs'); } + export namespace XmlDocComments { + export namespace Attribute { + export const Name = (text: string) => createToken(text, 'entity.other.attribute-name.localname.cs'); + } + + export namespace CData { + export const Begin = createToken('', 'punctuation.definition.string.end.cs'); + export const Text = (text: string) => createToken(text, 'string.unquoted.cdata.cs'); + } + + export namespace CharacterEntity { + export const Begin = createToken('&', 'punctuation.definition.constant.cs'); + export const End = createToken(';', 'punctuation.definition.constant.cs'); + export const Text = (text: string) => createToken(text, 'constant.character.entity.cs'); + } + + export namespace Comment { + export const Begin = createToken('', 'punctuation.definition.comment.cs') + export const Text = (text: string) => createToken(text, 'comment.block.cs') + } + + export namespace Tag { + // punctuation + export const StartTagBegin = createToken('<', 'punctuation.definition.tag.cs'); + export const StartTagEnd = createToken('>', 'punctuation.definition.tag.cs'); + export const EndTagBegin = createToken('', 'punctuation.definition.tag.cs'); + export const EmptyTagBegin = createToken('<', 'punctuation.definition.tag.cs'); + export const EmptyTagEnd = createToken('/>', 'punctuation.definition.tag.cs'); + + export const Name = (text: string) => createToken(text, 'entity.name.tag.localname.cs'); + } + + export namespace String { + export namespace DoubleQuoted { + export const Begin = createToken('"', 'punctuation.definition.string.begin.cs'); + export const End = createToken('"', 'punctuation.definition.string.end.cs'); + 
export const Text = (text: string) => createToken(text, 'string.quoted.double.cs'); + } + + export namespace SingleQuoted { + export const Begin = createToken('\'', 'punctuation.definition.string.begin.cs'); + export const End = createToken('\'', 'punctuation.definition.string.end.cs'); + export const Text = (text: string) => createToken(text, 'string.quoted.single.cs'); + } + } + + export const Begin = createToken('///', 'punctuation.definition.comment.cs'); + export const Colon = createToken(':', 'punctuation.separator.colon.cs'); + export const Equals = createToken('=', 'punctuation.separator.equals.cs'); + export const Text = (text: string) => createToken(text, 'comment.block.documentation.cs'); + } + export const IllegalNewLine = (text: string) => createToken(text, 'invalid.illegal.newline.cs'); export const PreprocessorMessage = (text: string) => createToken(text, 'string.unquoted.preprocessor.message.cs'); export const Type = (text: string) => createToken(text, 'storage.type.cs'); diff --git a/test/syntaxes/xml-doc-comments.test.syntax.ts b/test/syntaxes/xml-doc-comments.test.syntax.ts new file mode 100644 index 0000000000..9eb6f5d168 --- /dev/null +++ b/test/syntaxes/xml-doc-comments.test.syntax.ts @@ -0,0 +1,211 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. 
+ *--------------------------------------------------------------------------------------------*/ + +import { should } from 'chai'; +import { tokenize, Input, Token } from './utils/tokenize'; + +describe("Grammar", () => { + before(() => should()); + + describe("XML Doc Comments", () => { + it("start tag", () => { + const input = `/// `; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.XmlDocComments.Begin, + Token.XmlDocComments.Text(" "), + Token.XmlDocComments.Tag.StartTagBegin, + Token.XmlDocComments.Tag.Name("summary"), + Token.XmlDocComments.Tag.StartTagEnd + ]); + }); + + it("end tag", () => { + const input = `/// `; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.XmlDocComments.Begin, + Token.XmlDocComments.Text(" "), + Token.XmlDocComments.Tag.EndTagBegin, + Token.XmlDocComments.Tag.Name("summary"), + Token.XmlDocComments.Tag.EndTagEnd + ]); + }); + + it("empty tag", () => { + const input = `/// `; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.XmlDocComments.Begin, + Token.XmlDocComments.Text(" "), + Token.XmlDocComments.Tag.EmptyTagBegin, + Token.XmlDocComments.Tag.Name("summary"), + Token.XmlDocComments.Tag.EmptyTagEnd + ]); + }); + + it("start tag with attribute and single-quoted string", () => { + const input = `/// `; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.XmlDocComments.Begin, + Token.XmlDocComments.Text(" "), + Token.XmlDocComments.Tag.StartTagBegin, + Token.XmlDocComments.Tag.Name("param"), + Token.XmlDocComments.Attribute.Name("name"), + Token.XmlDocComments.Equals, + Token.XmlDocComments.String.SingleQuoted.Begin, + Token.XmlDocComments.String.SingleQuoted.Text("x"), + Token.XmlDocComments.String.SingleQuoted.End, + Token.XmlDocComments.Tag.StartTagEnd + ]); + }); + + it("start tag with attribute and double-quoted string", () => { + const input = `/// `; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + 
Token.XmlDocComments.Begin, + Token.XmlDocComments.Text(" "), + Token.XmlDocComments.Tag.StartTagBegin, + Token.XmlDocComments.Tag.Name("param"), + Token.XmlDocComments.Attribute.Name("name"), + Token.XmlDocComments.Equals, + Token.XmlDocComments.String.DoubleQuoted.Begin, + Token.XmlDocComments.String.DoubleQuoted.Text("x"), + Token.XmlDocComments.String.DoubleQuoted.End, + Token.XmlDocComments.Tag.StartTagEnd + ]); + }); + + it("comment", () => { + const input = `/// `; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.XmlDocComments.Begin, + Token.XmlDocComments.Text(" "), + Token.XmlDocComments.Comment.Begin, + Token.XmlDocComments.Comment.Text(" comment "), + Token.XmlDocComments.Comment.End + ]); + }); + + it("cdata", () => { + const input = `/// `; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.XmlDocComments.Begin, + Token.XmlDocComments.Text(" "), + Token.XmlDocComments.CData.Begin, + Token.XmlDocComments.CData.Text("c"), + Token.XmlDocComments.CData.End + ]); + }); + + it("character entity - name", () => { + const input = `/// &`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.XmlDocComments.Begin, + Token.XmlDocComments.Text(" "), + Token.XmlDocComments.CharacterEntity.Begin, + Token.XmlDocComments.CharacterEntity.Text("amp"), + Token.XmlDocComments.CharacterEntity.End + ]); + }); + + it("character entity - decimal", () => { + const input = `/// &`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.XmlDocComments.Begin, + Token.XmlDocComments.Text(" "), + Token.XmlDocComments.CharacterEntity.Begin, + Token.XmlDocComments.CharacterEntity.Text("#0038"), + Token.XmlDocComments.CharacterEntity.End + ]); + }); + + it("character entity - hdex", () => { + const input = `/// &`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.XmlDocComments.Begin, + Token.XmlDocComments.Text(" "), + Token.XmlDocComments.CharacterEntity.Begin, + 
Token.XmlDocComments.CharacterEntity.Text("#x0026"), + Token.XmlDocComments.CharacterEntity.End + ]); + }); + + it("XML doc comments are highlighted properly on enum members (issue #706)", () => { + const input = ` +/// This is a test Enum +public enum TestEnum +{ + /// Test Value One + TestValueOne= 0, + /// Test Value Two + TestValueTwo = 1 +}`; + + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.XmlDocComments.Begin, + Token.XmlDocComments.Text(" "), + Token.XmlDocComments.Tag.StartTagBegin, + Token.XmlDocComments.Tag.Name("summary"), + Token.XmlDocComments.Tag.StartTagEnd, + Token.XmlDocComments.Text(" This is a test Enum "), + Token.XmlDocComments.Tag.EndTagBegin, + Token.XmlDocComments.Tag.Name("summary"), + Token.XmlDocComments.Tag.EndTagEnd, + Token.Keywords.Modifiers.Public, + Token.Keywords.Enum, + Token.Identifiers.EnumName("TestEnum"), + Token.Punctuation.OpenBrace, + Token.Comment.LeadingWhitespace(" "), + Token.XmlDocComments.Begin, + Token.XmlDocComments.Text(" "), + Token.XmlDocComments.Tag.StartTagBegin, + Token.XmlDocComments.Tag.Name("summary"), + Token.XmlDocComments.Tag.StartTagEnd, + Token.XmlDocComments.Text(" Test Value One "), + Token.XmlDocComments.Tag.EndTagBegin, + Token.XmlDocComments.Tag.Name("summary"), + Token.XmlDocComments.Tag.EndTagEnd, + Token.Identifiers.EnumMemberName("TestValueOne"), + Token.Operators.Assignment, + Token.Literals.Numeric.Decimal("0"), + Token.Punctuation.Comma, + Token.Comment.LeadingWhitespace(" "), + Token.XmlDocComments.Begin, + Token.XmlDocComments.Text(" "), + Token.XmlDocComments.Tag.StartTagBegin, + Token.XmlDocComments.Tag.Name("summary"), + Token.XmlDocComments.Tag.StartTagEnd, + Token.XmlDocComments.Text(" Test Value Two "), + Token.XmlDocComments.Tag.EndTagBegin, + Token.XmlDocComments.Tag.Name("summary"), + Token.XmlDocComments.Tag.EndTagEnd, + Token.Identifiers.EnumMemberName("TestValueTwo"), + Token.Operators.Assignment, + Token.Literals.Numeric.Decimal("1"), + 
Token.Punctuation.CloseBrace + ]); + }); + }); +}); From 6cda9f5a39e69da79c9435ec422b77fd4e9a5215 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Tue, 10 Jan 2017 12:01:04 -0800 Subject: [PATCH 128/192] Add regression tests for issue #1091 --- test/syntaxes/methods.test.syntax.ts | 86 +++++++++++++++++++++++----- 1 file changed, 72 insertions(+), 14 deletions(-) diff --git a/test/syntaxes/methods.test.syntax.ts b/test/syntaxes/methods.test.syntax.ts index 4d802a37c2..c0e1d91da0 100644 --- a/test/syntaxes/methods.test.syntax.ts +++ b/test/syntaxes/methods.test.syntax.ts @@ -11,7 +11,6 @@ describe("Grammar", () => { describe("Methods", () => { it("single-line declaration with no parameters", () => { - const input = Input.InClass(`void Foo() { }`); const tokens = tokenize(input); @@ -25,7 +24,6 @@ describe("Grammar", () => { }); it("declaration with two parameters", () => { - const input = Input.InClass(` int Add(int x, int y) { @@ -53,7 +51,6 @@ int Add(int x, int y) }); it("declaration in with generic constraints", () => { - const input = Input.InClass(`TResult GetString(T arg) where T : TResult { }`); const tokens = tokenize(input); @@ -78,7 +75,6 @@ int Add(int x, int y) }); it("expression body", () => { - const input = Input.InClass(`int Add(int x, int y) => x + y;`); const tokens = tokenize(input); @@ -100,7 +96,6 @@ int Add(int x, int y) }); it("explicitly-implemented interface member", () => { - const input = Input.InClass(`string IFoo.GetString();`); const tokens = tokenize(input); @@ -118,7 +113,6 @@ int Add(int x, int y) }); it("declaration in interface", () => { - const input = Input.InInterface(`string GetString();`); const tokens = tokenize(input); @@ -131,7 +125,6 @@ int Add(int x, int y) }); it("declaration in interface with parameters", () => { - const input = Input.InInterface(`string GetString(string format, params object[] args);`); const tokens = tokenize(input); @@ -152,7 +145,6 @@ int Add(int x, int y) }); it("declaration in interface with 
generic constraints", () => { - const input = Input.InInterface(`TResult GetString(T arg) where T : TResult;`); const tokens = tokenize(input); @@ -176,7 +168,6 @@ int Add(int x, int y) }); it("public override", () => { - const input = Input.InClass(`public override M() { }`); const tokens = tokenize(input); @@ -192,7 +183,6 @@ int Add(int x, int y) }); it("public virtual", () => { - const input = Input.InClass(`public virtual M() { }`); const tokens = tokenize(input); @@ -208,7 +198,6 @@ int Add(int x, int y) }); it("commented parameters are highlighted properly (issue #802)", () => { - const input = Input.InClass(`public void methodWithParametersCommented(int p1, /*int p2*/, int p3) {}`); const tokens = tokenize(input); @@ -233,7 +222,6 @@ int Add(int x, int y) }); it("return type is highlighted properly in interface (issue #830)", () => { - const input = ` public interface test { @@ -275,7 +263,6 @@ public interface test }); it("attributes are highlighted properly (issue #829)", () => { - const input = ` namespace Test { @@ -434,7 +421,6 @@ namespace Test }); it("shadowed methods are highlighted properly (issue #1084)", () => { - const input = Input.InClass(` private new void foo1() //Correct highlight { @@ -468,5 +454,77 @@ new void foo2() //Function name not highlighted Token.Punctuation.CloseBrace ]); }); + + it("comment at end of line does not change highlights - 1 (issue #1091)", () => { + const input = Input.InClass(` +public abstract void Notify(PlayerId playerId, ISessionResponse response); //the +`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Public, + Token.Keywords.Modifiers.Abstract, + Token.PrimitiveType.Void, + Token.Identifiers.MethodName("Notify"), + Token.Punctuation.OpenParen, + Token.Type("PlayerId"), + Token.Identifiers.ParameterName("playerId"), + Token.Punctuation.Comma, + Token.Type("ISessionResponse"), + Token.Identifiers.ParameterName("response"), + Token.Punctuation.CloseParen, + 
Token.Punctuation.Semicolon, + Token.Comment.SingleLine.Start, + Token.Comment.SingleLine.Text("the") + ]); + }); + + it("comment at end of line does not change highlights - 2 (issue #1091)", () => { + const input = Input.InClass(` +public abstract void Notify(PlayerId playerId, ISessionResponse response); //the +`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Public, + Token.Keywords.Modifiers.Abstract, + Token.PrimitiveType.Void, + Token.Identifiers.MethodName("Notify"), + Token.Punctuation.OpenParen, + Token.Type("PlayerId"), + Token.Identifiers.ParameterName("playerId"), + Token.Punctuation.Comma, + Token.Type("ISessionResponse"), + Token.Identifiers.ParameterName("response"), + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + Token.Comment.SingleLine.Start, + Token.Comment.SingleLine.Text("the ") + ]); + }); + + it("comment at end of line does not change highlights - 3 (issue #1091)", () => { + const input = Input.InClass(` +public abstract void Notify(PlayerId playerId, ISessionResponse response); //the a +`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Public, + Token.Keywords.Modifiers.Abstract, + Token.PrimitiveType.Void, + Token.Identifiers.MethodName("Notify"), + Token.Punctuation.OpenParen, + Token.Type("PlayerId"), + Token.Identifiers.ParameterName("playerId"), + Token.Punctuation.Comma, + Token.Type("ISessionResponse"), + Token.Identifiers.ParameterName("response"), + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + Token.Comment.SingleLine.Start, + Token.Comment.SingleLine.Text("the a") + ]); + }); }); }); \ No newline at end of file From 75a1c99726e6bc9f37d098dd4ab7721574a34a5c Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Tue, 10 Jan 2017 12:21:33 -0800 Subject: [PATCH 129/192] Switch to new grammar --- package.json | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/package.json 
b/package.json index d07987caba..06f5514647 100644 --- a/package.json +++ b/package.json @@ -394,7 +394,7 @@ { "language": "csharp", "scopeName": "source.cs", - "path": "./syntaxes/csharp.json" + "path": "./syntaxes/csharp.tmLanguage" } ], "jsonValidation": [ @@ -478,13 +478,11 @@ "razor" ] }, - "runtime": "node", "runtimeArgs": [], "variables": { "pickProcess": "csharp.listProcess", "pickRemoteProcess": "csharp.listRemoteProcess" }, - "program": "./out/src/coreclr-debug/proxy.js", "aiKey": "AIF-d9b70cd4-b9f9-4d70-929b-a071c400b217", "configurationAttributes": { "launch": { @@ -1144,8 +1142,17 @@ "request": "attach", "processId": "${command.pickProcess}" } - ] + ], + "windows": { + "program": "./.debugger/OpenDebugAD7.exe" + }, + "osx": { + "program": "./.debugger/OpenDebugAD7" + }, + "linux": { + "program": "./.debugger/OpenDebugAD7" + } } ] } -} +} \ No newline at end of file From 166995a2ec12769ad682daf8e4ea8d9edbbd95a4 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Tue, 10 Jan 2017 13:04:01 -0800 Subject: [PATCH 130/192] Add top-level methods and statements and fix issue with implicit lambda parameters --- syntaxes/csharp.tmLanguage.yml | 58 ++++++++++++++- test/syntaxes/expressions.test.syntax.ts | 94 ++++++++++++++++++++++++ 2 files changed, 151 insertions(+), 1 deletion(-) diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index db0989f57e..b446705ccd 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -10,17 +10,23 @@ uuid: f7de61e2-bdde-4e2a-a139-8221b179584e # * Refinement and tests to ensure proper highlighting while typing # * is and as cast expressions # * null propagating operator +# * nameof expressions +# * 'this' modifier for extension methods +# * dynamic keyword +# * field-like events with initializers # * compound assignement # * verbatim identifiers # * hexadecimal and unicode character escape sequences # * unsafe code: fixed, sizeof, unsafe blocks, pointer member access # * 
multi-line XML doc comments +# * #load and #r directives patterns: - include: '#preprocessor' - include: '#comment' - include: '#directives' - include: '#declarations' +- include: '#script-top-level' repository: directives: @@ -36,6 +42,12 @@ repository: - include: '#type-declarations' - include: '#punctuation-semicolon' + script-top-level: + patterns: + - include: '#method-declaration' + - include: '#statement' + - include: '#punctuation-semicolon' + type-declarations: patterns: - include: '#preprocessor' @@ -1959,7 +1971,7 @@ repository: '1': { name: storage.modifier.cs } '2': patterns: - - include: '#parenthesized-parameter-list' + - include: '#lambda-parameter-list' '3': { name: keyword.operator.arrow.cs } end: (?=\)|;) patterns: @@ -1978,6 +1990,50 @@ repository: - include: '#block' - include: '#expression' + lambda-parameter-list: + begin: (\() + beginCaptures: + '0': { name: punctuation.parenthesis.open.cs } + end: (\)) + endCaptures: + '0': { name: punctuation.parenthesis.close.cs } + patterns: + - include: '#comment' + - include: '#attribute-section' + - include: '#lambda-parameter' + - include: '#punctuation-comma' + + lambda-parameter: + match: |- + (?x) + (ref|out)?\s* + (? + (?: + (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification + (? # identifier + type arguments (if any) + \g\s* + (?\s*<(?:[^<>]|\g)+>\s*)? + ) + (?:\s*\.\s*\g)* # Are there any more names being dotted into? + (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? + (?:\s*\[(?:\s*,\s*)*\]\s*)* # array suffix? + )| + (?\s*\((?:[^\(\)]|\g)+\)) + )? + \b(\g)\b\s* + (?=[,)]) + captures: + '1': { name: storage.modifier.cs } + '2': + patterns: + - include: '#type' + # '3': ? is a sub-expression. It's final value is not considered. + # '4': ? is a sub-expression. It's final value is not considered. + # '5': ? is a sub-expression. It's final value is not considered. + # '6': ? is a sub-expression. It's final value is not considered. 
+ '7': { name: entity.name.variable.parameter.cs } + type: name: meta.type.cs patterns: diff --git a/test/syntaxes/expressions.test.syntax.ts b/test/syntaxes/expressions.test.syntax.ts index 3bbec64b1f..ae08ed53e7 100644 --- a/test/syntaxes/expressions.test.syntax.ts +++ b/test/syntaxes/expressions.test.syntax.ts @@ -69,6 +69,31 @@ describe("Grammar", () => { ]); }); + it("lambda expression with multiple parameters (assignment)", () => { + const input = Input.InMethod(`Action a = (x, y) => { };`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("Action"), + Token.Punctuation.TypeParameters.Begin, + Token.PrimitiveType.Int, + Token.Punctuation.Comma, + Token.PrimitiveType.Int, + Token.Punctuation.TypeParameters.End, + Token.Identifiers.LocalName("a"), + Token.Operators.Assignment, + Token.Punctuation.OpenParen, + Token.Identifiers.ParameterName("x"), + Token.Punctuation.Comma, + Token.Identifiers.ParameterName("y"), + Token.Punctuation.CloseParen, + Token.Operators.Arrow, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.Semicolon + ]); + }); + it("lambda expression with multiple typed parameters (assignment)", () => { const input = Input.InMethod(`Action a = (int x, int y) => { };`); const tokens = tokenize(input); @@ -194,6 +219,34 @@ describe("Grammar", () => { ]); }); + it("async lambda expression with multiple parameters (assignment)", () => { + const input = Input.InMethod(`Func a = async (x, y) => { };`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("Func"), + Token.Punctuation.TypeParameters.Begin, + Token.PrimitiveType.Int, + Token.Punctuation.Comma, + Token.PrimitiveType.Int, + Token.Punctuation.Comma, + Token.Type("Task"), + Token.Punctuation.TypeParameters.End, + Token.Identifiers.LocalName("a"), + Token.Operators.Assignment, + Token.Keywords.Modifiers.Async, + Token.Punctuation.OpenParen, + Token.Identifiers.ParameterName("x"), + Token.Punctuation.Comma, + 
Token.Identifiers.ParameterName("y"), + Token.Punctuation.CloseParen, + Token.Operators.Arrow, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.Semicolon + ]); + }); + it("anonymous method with no parameter list (assignment)", () => { const input = Input.InMethod(`Action a = delegate { };`); const tokens = tokenize(input); @@ -366,6 +419,26 @@ describe("Grammar", () => { ]); }); + it("lambda expression with multiple parameters (passed as argument)", () => { + const input = Input.InMethod(`M((x, y) => { });`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("M"), + Token.Punctuation.OpenParen, + Token.Punctuation.OpenParen, + Token.Identifiers.ParameterName("x"), + Token.Punctuation.Comma, + Token.Identifiers.ParameterName("y"), + Token.Punctuation.CloseParen, + Token.Operators.Arrow, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + it("lambda expression with multiple typed parameters (passed as argument)", () => { const input = Input.InMethod(`M((int x, int y) => { });`); const tokens = tokenize(input); @@ -443,6 +516,27 @@ describe("Grammar", () => { ]); }); + it("async lambda expression with multiple parameters (passed as argument)", () => { + const input = Input.InMethod(`M(async (x, y) => { });`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Identifiers.MethodName("M"), + Token.Punctuation.OpenParen, + Token.Keywords.Modifiers.Async, + Token.Punctuation.OpenParen, + Token.Identifiers.ParameterName("x"), + Token.Punctuation.Comma, + Token.Identifiers.ParameterName("y"), + Token.Punctuation.CloseParen, + Token.Operators.Arrow, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + it("async lambda expression with multiple typed parameters (passed as argument)", () => { const input = 
Input.InMethod(`M(async (int x, int y) => { });`); const tokens = tokenize(input); From 3e3148b5e570a8267d3cef41146fb899fae834a8 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Tue, 10 Jan 2017 13:05:25 -0800 Subject: [PATCH 131/192] Tweak tuple type rule slightly --- syntaxes/csharp.tmLanguage.yml | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index b446705ccd..276943d076 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -2046,16 +2046,15 @@ repository: - include: '#type-nullable-suffix' tuple-type: + begin: \( + beginCaptures: + '0': { name: punctuation.parenthesis.open.cs } + end: \) + endCaptures: + '0': { name: punctuation.parenthesis.close.cs } patterns: - - begin: \( - beginCaptures: - '0': { name: punctuation.parenthesis.open.cs } - end: \) - endCaptures: - '0': { name: punctuation.parenthesis.close.cs } - patterns: - - include: '#tuple-element' - - include: '#punctuation-comma' + - include: '#tuple-element' + - include: '#punctuation-comma' tuple-element: match: |- From 2024390f82aa72451f2aec82ede2e713537e4535 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Tue, 10 Jan 2017 13:21:35 -0800 Subject: [PATCH 132/192] Add support for compound operators and 'this' keyword on extension methods --- syntaxes/csharp.tmLanguage.yml | 8 ++++--- test/syntaxes/methods.test.syntax.ts | 18 +++++++++++++++ test/syntaxes/utils/tokenize.ts | 33 ++++++++++++++++++++++------ 3 files changed, 49 insertions(+), 10 deletions(-) diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index 276943d076..b686c3a8a7 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -11,10 +11,8 @@ uuid: f7de61e2-bdde-4e2a-a139-8221b179584e # * is and as cast expressions # * null propagating operator # * nameof expressions -# * 'this' modifier for extension methods # * dynamic keyword # * field-like events 
with initializers -# * compound assignement # * verbatim identifiers # * hexadecimal and unicode character escape sequences # * unsafe code: fixed, sizeof, unsafe blocks, pointer member access @@ -1433,6 +1431,10 @@ repository: expression-operators: patterns: + - name: keyword.operator.assignment.compound.cs + match: \*=|/=|%=|\+=|-= + - name: keyword.operator.assignment.compound.bitwise.cs + match: \&=|\^=|<<=|>>=|\|= - name: keyword.operator.bitwise.shift.cs match: <<|>> - name: keyword.operator.comparison.cs @@ -1719,7 +1721,7 @@ repository: - include: '#comment' - include: '#attribute-section' - name: storage.modifier.cs - match: \b(ref|params|out)\b + match: \b(ref|params|out|this)\b # parameter name - match: \s+([_[:alpha:]][_[:alnum:]]*)\s*(?=[,)]) captures: diff --git a/test/syntaxes/methods.test.syntax.ts b/test/syntaxes/methods.test.syntax.ts index c0e1d91da0..f3ba86ae3d 100644 --- a/test/syntaxes/methods.test.syntax.ts +++ b/test/syntaxes/methods.test.syntax.ts @@ -197,6 +197,24 @@ int Add(int x, int y) ]); }); + it("extension method", () => { + const input = Input.InClass(`public void M(this object o) { }`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Public, + Token.PrimitiveType.Void, + Token.Identifiers.MethodName("M"), + Token.Punctuation.OpenParen, + Token.Keywords.Modifiers.This, + Token.PrimitiveType.Object, + Token.Identifiers.ParameterName("o"), + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Punctuation.CloseBrace + ]); + }); + it("commented parameters are highlighted properly (issue #802)", () => { const input = Input.InClass(`public void methodWithParametersCommented(int p1, /*int p2*/, int p3) {}`); const tokens = tokenize(input); diff --git a/test/syntaxes/utils/tokenize.ts b/test/syntaxes/utils/tokenize.ts index bc4758f375..e04574f74c 100644 --- a/test/syntaxes/utils/tokenize.ts +++ b/test/syntaxes/utils/tokenize.ts @@ -236,6 +236,7 @@ export namespace Token { export 
const Ref = createToken('ref', 'storage.modifier.cs'); export const Sealed = createToken('sealed', 'storage.modifier.cs'); export const Static = createToken('static', 'storage.modifier.cs'); + export const This = createToken('this', 'storage.modifier.cs'); export const Unsafe = createToken('unsafe', 'storage.modifier.cs'); export const Virtual = createToken('virtual', 'storage.modifier.cs'); } @@ -344,18 +345,36 @@ export namespace Token { export const Or = createToken('|', 'keyword.operator.bitwise.cs'); export const ShiftLeft = createToken('<<', 'keyword.operator.bitwise.shift.cs'); export const ShiftRight = createToken('>>', 'keyword.operator.bitwise.shift.cs'); - } + } + + export namespace CompoundAssignment { + export namespace Arithmetic { + export const Addition = createToken('+=', 'keyword.operator.assignment.compound.ts'); + export const Division = createToken('/=', 'keyword.operator.assignment.compound.ts'); + export const Multiplication = createToken('*=', 'keyword.operator.assignment.compound.ts'); + export const Remainder = createToken('%=', 'keyword.operator.assignment.compound.ts'); + export const Subtraction = createToken('-=', 'keyword.operator.assignment.compound.ts'); + } + + export namespace Bitwise { + export const And = createToken('&=', 'keyword.operator.assignment.compound.bitwise.ts'); + export const ExclusiveOr = createToken('^=', 'keyword.operator.assignment.compound.bitwise.ts'); + export const Or = createToken('|=', 'keyword.operator.assignment.compound.bitwise.ts'); + export const ShiftLeft = createToken('<<=', 'keyword.operator.assignment.compound.bitwise.ts'); + export const ShiftRight = createToken('>>=', 'keyword.operator.assignment.compound.bitwise.ts'); + } + } - export namespace Conditional { + export namespace Conditional { export const QuestionMark = createToken('?', 'keyword.operator.conditional.question-mark.cs'); export const Colon = createToken(':', 'keyword.operator.conditional.colon.cs'); - } + } export namespace Logical 
{ export const And = createToken('&&', 'keyword.operator.logical.cs'); export const Not = createToken('!', 'keyword.operator.logical.cs'); export const Or = createToken('||', 'keyword.operator.logical.cs'); - } + } export namespace Relational { export const Equals = createToken('==', 'keyword.operator.comparison.cs'); @@ -365,7 +384,7 @@ export namespace Token { export const LessThanOrEqual = createToken('<=', 'keyword.operator.relational.cs'); export const GreaterThan = createToken('>', 'keyword.operator.relational.cs'); export const GreaterThanOrEqual = createToken('>=', 'keyword.operator.relational.cs'); - } + } export const Arrow = createToken('=>', 'keyword.operator.arrow.cs'); export const Assignment = createToken('=', 'keyword.operator.assignment.cs'); @@ -394,12 +413,12 @@ export namespace Token { } export namespace Punctuation { - export namespace Char { + export namespace Char { export const Begin = createToken('\'', 'punctuation.definition.char.begin.cs'); export const End = createToken('\'', 'punctuation.definition.char.end.cs'); } - export namespace Interpolation { + export namespace Interpolation { export const Begin = createToken('{', 'punctuation.definition.interpolation.begin.cs'); export const End = createToken('}', 'punctuation.definition.interpolation.end.cs'); } From 057a85477bfc00e74d9930ea86ad77f3941822cc Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Tue, 10 Jan 2017 13:27:25 -0800 Subject: [PATCH 133/192] Add support for nameof expressions --- syntaxes/csharp.tmLanguage.yml | 13 ++++++++++++- test/syntaxes/expressions.test.syntax.ts | 19 +++++++++++++++++++ test/syntaxes/utils/tokenize.ts | 1 + 3 files changed, 32 insertions(+), 1 deletion(-) diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index b686c3a8a7..bb8af7faa0 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -10,7 +10,6 @@ uuid: f7de61e2-bdde-4e2a-a139-8221b179584e # * Refinement and tests to ensure proper 
highlighting while typing # * is and as cast expressions # * null propagating operator -# * nameof expressions # * dynamic keyword # * field-like events with initializers # * verbatim identifiers @@ -140,6 +139,7 @@ repository: - include: '#comment' - include: '#checked-unchecked-expression' - include: '#typeof-or-default-expression' + - include: '#nameof-expression' - include: '#interpolated-string' - include: '#verbatim-interpolated-string' - include: '#literal' @@ -1309,6 +1309,17 @@ repository: patterns: - include: '#type' + nameof-expression: + begin: (? { + it("in assignment", () => { + const input = Input.InMethod(`const int x = nameof(x);`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Modifiers.Const, + Token.PrimitiveType.Int, + Token.Identifiers.LocalName("x"), + Token.Operators.Assignment, + Token.Keywords.NameOf, + Token.Punctuation.OpenParen, + Token.Variables.ReadWrite("x"), + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + }); + describe("Null-coalescing Operator", () => { it("in assignment", () => { const input = Input.InMethod(`var y = x ?? 
new object();`); diff --git a/test/syntaxes/utils/tokenize.ts b/test/syntaxes/utils/tokenize.ts index e04574f74c..bb52e59afc 100644 --- a/test/syntaxes/utils/tokenize.ts +++ b/test/syntaxes/utils/tokenize.ts @@ -295,6 +295,7 @@ export namespace Token { export const Implicit = createToken('implicit', 'keyword.other.implicit.cs'); export const Interface = createToken('interface', 'keyword.other.interface.cs'); export const Lock = createToken('lock', 'keyword.other.lock.cs'); + export const NameOf = createToken('nameof', 'keyword.other.nameof.cs'); export const Namespace = createToken('namespace', 'keyword.other.namespace.cs'); export const New = createToken('new', 'keyword.other.new.cs'); export const Operator = createToken('operator', 'keyword.other.operator-decl.cs'); From 8ebfdf842560b0607dd1a0bc2dda016ae41bbdfc Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Tue, 10 Jan 2017 14:29:09 -0800 Subject: [PATCH 134/192] Add support for null-conditional operator --- syntaxes/csharp.tmLanguage.yml | 56 ++++++---- test/syntaxes/expressions.test.syntax.ts | 131 +++++++++++++++++++++++ test/syntaxes/utils/tokenize.ts | 1 + 3 files changed, 168 insertions(+), 20 deletions(-) diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index bb8af7faa0..963729f5e0 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -9,7 +9,6 @@ uuid: f7de61e2-bdde-4e2a-a139-8221b179584e # # * Refinement and tests to ensure proper highlighting while typing # * is and as cast expressions -# * null propagating operator # * dynamic keyword # * field-like events with initializers # * verbatim identifiers @@ -144,8 +143,6 @@ repository: - include: '#verbatim-interpolated-string' - include: '#literal' - include: '#this-or-base-expression' - - include: '#conditional-operator' - - include: '#expression-operators' - include: '#query-expression' - include: '#anonymous-method-expression' - include: '#object-creation-expression' @@ -154,6 +151,8 @@ 
repository: - include: '#member-access-expression' - include: '#invocation-expression' - include: '#element-access-expression' + - include: '#conditional-operator' + - include: '#expression-operators' - include: '#cast-expression' - include: '#parenthesized-expression' - include: '#initializer-expression' @@ -1468,7 +1467,10 @@ repository: match: \?\? conditional-operator: - begin: (?\s*<([^<>]|\g)+>\s*)?\s* # type arguments (?=\() # open paren of argument list beginCaptures: - '1': { name: punctuation.accessor.cs } - '2': { name: entity.name.function.cs } - '3': + '1': { name: keyword.operator.null-conditional.cs } + '2': { name: punctuation.accessor.cs } + '3': { name: entity.name.function.cs } + '4': patterns: - include: '#type-arguments' end: (?<=\)) @@ -1558,12 +1562,16 @@ repository: element-access-expression: begin: |- (?x) - (\.)? # preceding dot + (?:(\?)\s*)? # preceding null-conditional operator? + (?:(\.)\s*)? # preceding dot? ([_[:alpha:]][_[:alnum:]]*)\s* # property name - (?=\[) # open paren of argument list + (?:(\?)\s*)? # null-conditional operator? + (?=\[) # open bracket of argument list beginCaptures: - '1': { name: punctuation.accessor.cs } - '2': { name: variable.other.object.property.cs } + '1': { name: keyword.operator.null-conditional.cs } + '2': { name: punctuation.accessor.cs } + '3': { name: variable.other.object.property.cs } + '4': { name: keyword.operator.null-conditional.cs } end: (?<=\]) patterns: - include: '#bracketed-argument-list' @@ -1572,14 +1580,16 @@ repository: patterns: # An identifier with no type parameters and a dot to the left should # be treated as a property, so long as it isn't followed by a ( or [. - - match: + - match: |- (?x) - (\.)\s* - ([_[:alpha:]][_[:alnum:]]*)\s* - (?![_[:alnum:]]|\(|\[|<) + (?:(\?)\s*)? # preceding null-conditional operator? + (\.)\s* # preceding dot + ([_[:alpha:]][_[:alnum:]]*)\s* # property name + (?![_[:alnum:]]|\(|(\?)?\[|<) # next character is not alpha-numeric, nor a (, [, or <. 
Also, test for ?[ captures: - '1': { name: punctuation.accessor.cs } - '2': { name: variable.other.object.property.cs } + '1': { name: keyword.operator.null-conditional.cs } + '2': { name: punctuation.accessor.cs } + '3': { name: variable.other.object.property.cs } # An identifier with type parameters should be treated as an object, # regardless of whether there is a dot to the left. - match: |- @@ -1587,7 +1597,10 @@ repository: (\.)?\s* ([_[:alpha:]][_[:alnum:]]*) (?\s*<([^<>]|\g)+>\s*) - (?=\s*\.\s*[_[:alpha:]][_[:alnum:]]*) + (?= + (\s*\?)? + \s*\.\s*[_[:alpha:]][_[:alnum:]]* + ) captures: '1': { name: punctuation.accessor.cs } '2': { name: variable.other.object.cs } @@ -1599,7 +1612,10 @@ repository: - match: |- (?x) ([_[:alpha:]][_[:alnum:]]*) - (?=\s*\.\s*[_[:alpha:]][_[:alnum:]]*) + (?= + (\s*\?)? + \s*\.\s*[_[:alpha:]][_[:alnum:]]* + ) captures: '1': { name: variable.other.object.cs } diff --git a/test/syntaxes/expressions.test.syntax.ts b/test/syntaxes/expressions.test.syntax.ts index fce16020dd..8038be1064 100644 --- a/test/syntaxes/expressions.test.syntax.ts +++ b/test/syntaxes/expressions.test.syntax.ts @@ -1062,6 +1062,24 @@ class C ]); }); + it("member", () => { + const input = Input.InMethod(`var a = b.c[0];`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Identifiers.LocalName("a"), + Token.Operators.Assignment, + Token.Variables.Object("b"), + Token.Punctuation.Accessor, + Token.Variables.Property("c"), + Token.Punctuation.OpenBracket, + Token.Literals.Numeric.Decimal("0"), + Token.Punctuation.CloseBracket, + Token.Punctuation.Semicolon + ]); + }); + it("read/write array element", () => { const input = Input.InMethod(` object[] a1 = {(null), (this.a), c}; @@ -1443,6 +1461,119 @@ a1[1] = ((this.a)); a1[2] = (c); a1[1] = (i); }); }); + describe("Null-conditional Operator", () => { + it("before dot 1", () => { + const input = Input.InMethod(`var a = b?.c;`); + const tokens = tokenize(input); + + 
tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Identifiers.LocalName("a"), + Token.Operators.Assignment, + Token.Variables.Object("b"), + Token.Operators.NullConditional, + Token.Punctuation.Accessor, + Token.Variables.Property("c"), + Token.Punctuation.Semicolon + ]); + }); + + it("before dot 2", () => { + const input = Input.InMethod(`var a = b.c?.d;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Identifiers.LocalName("a"), + Token.Operators.Assignment, + Token.Variables.Object("b"), + Token.Punctuation.Accessor, + Token.Variables.Property("c"), + Token.Operators.NullConditional, + Token.Punctuation.Accessor, + Token.Variables.Property("d"), + Token.Punctuation.Semicolon + ]); + }); + + it("in element access 1", () => { + const input = Input.InMethod(`var a = b.c?[0];`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Identifiers.LocalName("a"), + Token.Operators.Assignment, + Token.Variables.Object("b"), + Token.Punctuation.Accessor, + Token.Variables.Property("c"), + Token.Operators.NullConditional, + Token.Punctuation.OpenBracket, + Token.Literals.Numeric.Decimal("0"), + Token.Punctuation.CloseBracket, + Token.Punctuation.Semicolon + ]); + }); + + it("in element access 2", () => { + const input = Input.InMethod(`var a = b.c?.d?[0];`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Identifiers.LocalName("a"), + Token.Operators.Assignment, + Token.Variables.Object("b"), + Token.Punctuation.Accessor, + Token.Variables.Property("c"), + Token.Operators.NullConditional, + Token.Punctuation.Accessor, + Token.Variables.Property("d"), + Token.Operators.NullConditional, + Token.Punctuation.OpenBracket, + Token.Literals.Numeric.Decimal("0"), + Token.Punctuation.CloseBracket, + Token.Punctuation.Semicolon + ]); + }); + + it("before element access", () => { + const input = Input.InMethod(`var a = b.c[0];`); + const 
tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Identifiers.LocalName("a"), + Token.Operators.Assignment, + Token.Variables.Object("b"), + Token.Punctuation.Accessor, + Token.Variables.Property("c"), + Token.Punctuation.OpenBracket, + Token.Literals.Numeric.Decimal("0"), + Token.Punctuation.CloseBracket, + Token.Punctuation.Semicolon + ]); + }); + + it("before invocation", () => { + const input = Input.InMethod(`var a = b?.c());`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Identifiers.LocalName("a"), + Token.Operators.Assignment, + Token.Variables.Object("b"), + Token.Operators.NullConditional, + Token.Punctuation.Accessor, + Token.Identifiers.MethodName("c"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + }); + describe("Primary", () => { it("default", () => { const input = Input.InMethod(`var t = default(List<>);`); diff --git a/test/syntaxes/utils/tokenize.ts b/test/syntaxes/utils/tokenize.ts index bb52e59afc..3f51e74dbc 100644 --- a/test/syntaxes/utils/tokenize.ts +++ b/test/syntaxes/utils/tokenize.ts @@ -392,6 +392,7 @@ export namespace Token { export const Decrement = createToken('--', 'keyword.operator.decrement.cs'); export const Increment = createToken('++', 'keyword.operator.increment.cs'); export const NullCoalescing = createToken('??', 'keyword.operator.null-coalescing.cs'); + export const NullConditional = createToken('?', 'keyword.operator.null-conditional.cs'); } export namespace PrimitiveType { From e214cdf2f2024ca55aba7c70e8b5f9a44cac8ed9 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Tue, 10 Jan 2017 14:39:17 -0800 Subject: [PATCH 135/192] Tweak to anonymous object creation expressions --- syntaxes/csharp.tmLanguage.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index 963729f5e0..9e056196a6 100644 --- 
a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -1709,7 +1709,7 @@ repository: begin: \b(new)\b\s*(?=\{|$) beginCaptures: '1': { name: keyword.other.new.cs } - end: (?=;) + end: (?=;|\)) patterns: - include: '#initializer-expression' From 2f3f41e6cda268af210e5435fe034af9a3520ec7 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Tue, 10 Jan 2017 14:47:41 -0800 Subject: [PATCH 136/192] Add regression test for issue #268 --- test/syntaxes/methods.test.syntax.ts | 65 ++++++++++++++++++++++++++++ 1 file changed, 65 insertions(+) diff --git a/test/syntaxes/methods.test.syntax.ts b/test/syntaxes/methods.test.syntax.ts index f3ba86ae3d..0595d18dab 100644 --- a/test/syntaxes/methods.test.syntax.ts +++ b/test/syntaxes/methods.test.syntax.ts @@ -544,5 +544,70 @@ public abstract void Notify(PlayerId playerId, ISessionResponse response); //the Token.Comment.SingleLine.Text("the a") ]); }); + + it("value is not incorrectly highlighted (issue #268)", () => { + const input = ` +namespace x { +public class ClassA +{ + public class ClassAa + { + public bool MyMethod(string key, TT value) + { + return someObject.SomeCall(key, value); // on this line, 'value' is highlighted as though it were the keyword being used in a setter + } + } +} +} +`; + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Namespace, + Token.Identifiers.NamespaceName("x"), + Token.Punctuation.OpenBrace, + Token.Keywords.Modifiers.Public, + Token.Keywords.Class, + Token.Identifiers.ClassName("ClassA"), + Token.Punctuation.TypeParameters.Begin, + Token.Identifiers.TypeParameterName("T"), + Token.Punctuation.TypeParameters.End, + Token.Punctuation.OpenBrace, + Token.Keywords.Modifiers.Public, + Token.Keywords.Class, + Token.Identifiers.ClassName("ClassAa"), + Token.Punctuation.TypeParameters.Begin, + Token.Identifiers.TypeParameterName("TT"), + Token.Punctuation.TypeParameters.End, + Token.Punctuation.OpenBrace, + Token.Keywords.Modifiers.Public, + 
Token.PrimitiveType.Bool, + Token.Identifiers.MethodName("MyMethod"), + Token.Punctuation.OpenParen, + Token.PrimitiveType.String, + Token.Identifiers.ParameterName("key"), + Token.Punctuation.Comma, + Token.Type("TT"), + Token.Identifiers.ParameterName("value"), + Token.Punctuation.CloseParen, + Token.Punctuation.OpenBrace, + Token.Keywords.Control.Return, + Token.Variables.Object("someObject"), + Token.Punctuation.Accessor, + Token.Identifiers.MethodName("SomeCall"), + Token.Punctuation.OpenParen, + Token.Variables.ReadWrite("key"), + Token.Punctuation.Comma, + Token.Variables.ReadWrite("value"), + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + Token.Comment.SingleLine.Start, + Token.Comment.SingleLine.Text(" on this line, 'value' is highlighted as though it were the keyword being used in a setter"), + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseBrace, + Token.Punctuation.CloseBrace + ]); + }); }); }); \ No newline at end of file From 0b2b40209b4328da2adb33a49971a421b1fb4163 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Tue, 10 Jan 2017 15:11:59 -0800 Subject: [PATCH 137/192] Add support for await expressions --- syntaxes/csharp.tmLanguage.yml | 10 ++++- test/syntaxes/expressions.test.syntax.ts | 47 ++++++++++++++++++++++++ test/syntaxes/utils/tokenize.ts | 1 + 3 files changed, 56 insertions(+), 2 deletions(-) diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index 9e056196a6..af86c8a844 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -11,6 +11,7 @@ uuid: f7de61e2-bdde-4e2a-a139-8221b179584e # * is and as cast expressions # * dynamic keyword # * field-like events with initializers +# * predefined types on left-side of invocation expressions # * verbatim identifiers # * hexadecimal and unicode character escape sequences # * unsafe code: fixed, sizeof, unsafe blocks, pointer member access @@ -143,6 +144,9 @@ repository: - include: 
'#verbatim-interpolated-string' - include: '#literal' - include: '#this-or-base-expression' + - include: '#conditional-operator' + - include: '#expression-operators' + - include: '#await-expression' - include: '#query-expression' - include: '#anonymous-method-expression' - include: '#object-creation-expression' @@ -151,8 +155,6 @@ repository: - include: '#member-access-expression' - include: '#invocation-expression' - include: '#element-access-expression' - - include: '#conditional-operator' - - include: '#expression-operators' - include: '#cast-expression' - include: '#parenthesized-expression' - include: '#initializer-expression' @@ -1479,6 +1481,10 @@ repository: patterns: - include: '#expression' + await-expression: + name: keyword.other.await.cs + match: (?!\.)\b(await)\b + parenthesized-expression: begin: \( beginCaptures: diff --git a/test/syntaxes/expressions.test.syntax.ts b/test/syntaxes/expressions.test.syntax.ts index 8038be1064..ef78c49e3c 100644 --- a/test/syntaxes/expressions.test.syntax.ts +++ b/test/syntaxes/expressions.test.syntax.ts @@ -726,6 +726,53 @@ describe("Grammar", () => { }); }); + describe("Await", () => { + it("at statement level", () => { + const input = Input.InMethod(`await M();`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Await, + Token.Identifiers.MethodName("M"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("in assignment", () => { + const input = Input.InMethod(`var x = await M();`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Identifiers.LocalName("x"), + Token.Operators.Assignment, + Token.Keywords.Await, + Token.Identifiers.MethodName("M"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + + it("passed as argument", () => { + const input = Input.InMethod(`M1(await M2());`); + const tokens = tokenize(input); + + 
tokens.should.deep.equal([ + Token.Identifiers.MethodName("M1"), + Token.Punctuation.OpenParen, + Token.Keywords.Await, + Token.Identifiers.MethodName("M2"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon + ]); + }); + }); + describe("Casts", () => { it("cast to built-in type in assignment", () => { const input = Input.InMethod(`var o = (object)42;`); diff --git a/test/syntaxes/utils/tokenize.ts b/test/syntaxes/utils/tokenize.ts index 3f51e74dbc..09b3fb689c 100644 --- a/test/syntaxes/utils/tokenize.ts +++ b/test/syntaxes/utils/tokenize.ts @@ -281,6 +281,7 @@ export namespace Token { export const Add = createToken('add', 'keyword.other.add.cs'); export const Alias = createToken('alias', 'keyword.other.alias.cs'); export const AttributeSpecifier = (text: string) => createToken(text, 'keyword.other.attribute-specifier.cs'); + export const Await = createToken('await', 'keyword.other.await.cs'); export const Base = createToken('base', 'keyword.other.base.cs'); export const Checked = createToken('checked', 'keyword.other.checked.cs'); export const Class = createToken('class', 'keyword.other.class.cs'); From 161ddf2533d922a9d19f667c0b7ac15f9e8f1f7f Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Tue, 10 Jan 2017 15:18:55 -0800 Subject: [PATCH 138/192] Tweak query expressions slightly --- syntaxes/csharp.tmLanguage.yml | 14 ++++++------ test/syntaxes/expressions.test.syntax.ts | 27 ++++++++++++++++++++++++ 2 files changed, 34 insertions(+), 7 deletions(-) diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index af86c8a844..135d848fe3 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -1833,7 +1833,7 @@ repository: # '6': ? is a sub-expression. It's final value is not considered. 
'7': { name: entity.name.variable.range-variable.cs } '8': { name: keyword.query.in.cs } - end: (?=;) + end: (?=;|\)) patterns: - include: '#query-body' - include: '#expression' @@ -1857,7 +1857,7 @@ repository: '1': { name: keyword.query.let.cs } '2': { name: entity.name.variable.range-variable.cs } '3': { name: keyword.operator.assignment.cs } - end: (?=;) + end: (?=;|\)) patterns: - include: '#query-body' - include: '#expression' @@ -1868,7 +1868,7 @@ repository: \b(where)\b\s* beginCaptures: '1': { name: keyword.query.where.cs } - end: (?=;) + end: (?=;|\)) patterns: - include: '#query-body' - include: '#expression' @@ -1904,7 +1904,7 @@ repository: # '6': ? is a sub-expression. It's final value is not considered. '7': { name: entity.name.variable.range-variable.cs } '8': { name: keyword.query.in.cs } - end: (?=;) + end: (?=;|\)) patterns: - include: '#join-on' - include: '#join-equals' @@ -1935,7 +1935,7 @@ repository: begin: \b(orderby)\b\s* beginCaptures: '1': { name: keyword.query.orderby.cs } - end: (?=;) + end: (?=;|\)) patterns: - include: '#ordering-direction' - include: '#query-body' @@ -1952,7 +1952,7 @@ repository: begin: \b(select)\b\s* beginCaptures: '1': { name: keyword.query.select.cs } - end: (?=;) + end: (?=;|\)) patterns: - include: '#query-body' - include: '#expression' @@ -1961,7 +1961,7 @@ repository: begin: \b(group)\b\s* beginCaptures: '1': { name: keyword.query.group.cs } - end: (?=;) + end: (?=;|\)) patterns: - include: '#group-by' - include: '#group-into' diff --git a/test/syntaxes/expressions.test.syntax.ts b/test/syntaxes/expressions.test.syntax.ts index ef78c49e3c..920d84d9d5 100644 --- a/test/syntaxes/expressions.test.syntax.ts +++ b/test/syntaxes/expressions.test.syntax.ts @@ -1943,6 +1943,33 @@ var q = from c in customers ]); }); + it("parenthesized", () => { + const input = Input.InMethod(` +var q = (from x in "abc" select x); +string s;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + 
Token.Identifiers.LocalName("q"), + Token.Operators.Assignment, + Token.Punctuation.OpenParen, + Token.Keywords.Queries.From, + Token.Identifiers.RangeVariableName("x"), + Token.Keywords.Queries.In, + Token.Punctuation.String.Begin, + Token.Literals.String("abc"), + Token.Punctuation.String.End, + Token.Keywords.Queries.Select, + Token.Variables.ReadWrite("x"), + Token.Punctuation.CloseParen, + Token.Punctuation.Semicolon, + Token.PrimitiveType.String, + Token.Identifiers.LocalName("s"), + Token.Punctuation.Semicolon + ]); + }); + it("highlight complex query properly (issue #1106)", () => { const input = Input.InClass(` private static readonly Parser NodeParser = From 28ff2cf97433bba24393bc854884e15c4f3c7cf9 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Tue, 10 Jan 2017 15:45:43 -0800 Subject: [PATCH 139/192] Add support for is and as casts --- syntaxes/csharp.tmLanguage.yml | 51 +++++++++++++- test/syntaxes/expressions.test.syntax.ts | 88 ++++++++++++++++++++++++ test/syntaxes/utils/tokenize.ts | 2 + 3 files changed, 140 insertions(+), 1 deletion(-) diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index 135d848fe3..1e1cb27938 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -8,7 +8,6 @@ uuid: f7de61e2-bdde-4e2a-a139-8221b179584e # TODO List # # * Refinement and tests to ensure proper highlighting while typing -# * is and as cast expressions # * dynamic keyword # * field-like events with initializers # * predefined types on left-side of invocation expressions @@ -148,6 +147,8 @@ repository: - include: '#expression-operators' - include: '#await-expression' - include: '#query-expression' + - include: '#as-expression' + - include: '#is-expression' - include: '#anonymous-method-expression' - include: '#object-creation-expression' - include: '#array-creation-expression' @@ -1540,6 +1541,54 @@ repository: # '6': ? is a sub-expression. It's final value is not considered. 
'7': { name: punctuation.parenthesis.close.cs } + as-expression: + match: |- + (?x) + (? + (?: + (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification + (? # identifier + type arguments (if any) + \g\s* + (?\s*<(?:[^<>]|\g)+>\s*)? + ) + (?:\s*\.\s*\g)* # Are there any more names being dotted into? + (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? + (?:\s*\[(?:\s*,\s*)*\]\s*)* # array suffix? + )| + (?\s*\((?:[^\(\)]|\g)+\)) + )? + captures: + '1': { name: keyword.other.as.cs } + '2': + patterns: + - include: '#type' + + is-expression: + match: |- + (?x) + (? + (?: + (?:(?[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification + (? # identifier + type arguments (if any) + \g\s* + (?\s*<(?:[^<>]|\g)+>\s*)? + ) + (?:\s*\.\s*\g)* # Are there any more names being dotted into? + (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? + (?:\s*\[(?:\s*,\s*)*\]\s*)* # array suffix? + )| + (?\s*\((?:[^\(\)]|\g)+\)) + )? + captures: + '1': { name: keyword.other.is.cs } + '2': + patterns: + - include: '#type' + this-or-base-expression: match: \b(?:(base)|(this))\b captures: diff --git a/test/syntaxes/expressions.test.syntax.ts b/test/syntaxes/expressions.test.syntax.ts index 920d84d9d5..0df701e48d 100644 --- a/test/syntaxes/expressions.test.syntax.ts +++ b/test/syntaxes/expressions.test.syntax.ts @@ -843,6 +843,94 @@ describe("Grammar", () => { Token.Punctuation.Semicolon ]); }); + + it("as cast of identifier", () => { + const input = Input.InMethod(`var x = o as List>;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Identifiers.LocalName("x"), + Token.Operators.Assignment, + Token.Variables.ReadWrite("o"), + Token.Keywords.As, + Token.Type("List"), + Token.Punctuation.TypeParameters.Begin, + Token.Type("Lazy"), + Token.Punctuation.TypeParameters.Begin, + Token.PrimitiveType.String, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.TypeParameters.End, + 
Token.Punctuation.Semicolon + ]); + }); + + it("as cast of invocation", () => { + const input = Input.InMethod(`var x = M() as List>;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Identifiers.LocalName("x"), + Token.Operators.Assignment, + Token.Identifiers.MethodName("M"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Keywords.As, + Token.Type("List"), + Token.Punctuation.TypeParameters.Begin, + Token.Type("Lazy"), + Token.Punctuation.TypeParameters.Begin, + Token.PrimitiveType.String, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.Semicolon + ]); + }); + + it("is cast of identifier", () => { + const input = Input.InMethod(`var x = o is List>;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Identifiers.LocalName("x"), + Token.Operators.Assignment, + Token.Variables.ReadWrite("o"), + Token.Keywords.Is, + Token.Type("List"), + Token.Punctuation.TypeParameters.Begin, + Token.Type("Lazy"), + Token.Punctuation.TypeParameters.Begin, + Token.PrimitiveType.String, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.Semicolon + ]); + }); + + it("is cast of invocation", () => { + const input = Input.InMethod(`var x = M() is List>;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Keywords.Var, + Token.Identifiers.LocalName("x"), + Token.Operators.Assignment, + Token.Identifiers.MethodName("M"), + Token.Punctuation.OpenParen, + Token.Punctuation.CloseParen, + Token.Keywords.Is, + Token.Type("List"), + Token.Punctuation.TypeParameters.Begin, + Token.Type("Lazy"), + Token.Punctuation.TypeParameters.Begin, + Token.PrimitiveType.String, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.TypeParameters.End, + Token.Punctuation.Semicolon + ]); + }); }); describe("Checked/Unchecked", () => { diff --git 
a/test/syntaxes/utils/tokenize.ts b/test/syntaxes/utils/tokenize.ts index 09b3fb689c..920f2357f4 100644 --- a/test/syntaxes/utils/tokenize.ts +++ b/test/syntaxes/utils/tokenize.ts @@ -282,6 +282,7 @@ export namespace Token { export const Alias = createToken('alias', 'keyword.other.alias.cs'); export const AttributeSpecifier = (text: string) => createToken(text, 'keyword.other.attribute-specifier.cs'); export const Await = createToken('await', 'keyword.other.await.cs'); + export const As = createToken('as', 'keyword.other.as.cs'); export const Base = createToken('base', 'keyword.other.base.cs'); export const Checked = createToken('checked', 'keyword.other.checked.cs'); export const Class = createToken('class', 'keyword.other.class.cs'); @@ -295,6 +296,7 @@ export namespace Token { export const Get = createToken('get', 'keyword.other.get.cs'); export const Implicit = createToken('implicit', 'keyword.other.implicit.cs'); export const Interface = createToken('interface', 'keyword.other.interface.cs'); + export const Is = createToken('is', 'keyword.other.is.cs'); export const Lock = createToken('lock', 'keyword.other.lock.cs'); export const NameOf = createToken('nameof', 'keyword.other.nameof.cs'); export const Namespace = createToken('namespace', 'keyword.other.namespace.cs'); From 3873cc6b5e8ad1ec9910c8b6ca3412fa4ac70119 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Tue, 10 Jan 2017 15:46:04 -0800 Subject: [PATCH 140/192] Delete old grammar --- syntaxes/csharp.json | 814 ------------------------------------------- 1 file changed, 814 deletions(-) delete mode 100644 syntaxes/csharp.json diff --git a/syntaxes/csharp.json b/syntaxes/csharp.json deleted file mode 100644 index a3b1af6104..0000000000 --- a/syntaxes/csharp.json +++ /dev/null @@ -1,814 +0,0 @@ -{ - "scopeName": "source.cs", - "name": "C#", - "fileTypes": [ - "cs" - ], - "foldingStartMarker": "^\\s*#\\s*region|^\\s*/\\*|^(?![^{]*?//|[^{]*?/\\*(?!.*?\\*/.*?\\{)).*?\\{\\s*($|//|/\\*(?!.*?\\*/.*\\S))", - 
"foldingStopMarker": "^\\s*#\\s*endregion|^\\s*\\*/|^\\s*\\}", - "patterns": [ - { - "include": "#using" - }, - { - "include": "#namespace" - }, - { - "include": "#code" - } - ], - "repository": { - "using": { - "begin": "^\\s*(using)\\b\\s*", - "captures": { - "1": { - "name": "keyword.other.using.cs" - } - }, - "end": "\\s*(?:$|;)" - }, - "namespace": { - "begin": "^\\s*[^@]?((namespace)\\s+([\\w.]+))", - "beginCaptures": { - "1": { - "name": "meta.namespace.identifier.cs" - }, - "2": { - "name": "keyword.other.namespace.cs" - }, - "3": { - "name": "entity.name.type.namespace.cs" - } - }, - "end": "}", - "endCaptures": { - "0": { - "name": "punctuation.section.namespace.end.cs" - } - }, - "name": "meta.namespace.cs", - "patterns": [ - { - "begin": "{", - "beginCaptures": { - "0": { - "name": "punctuation.section.namespace.begin.cs" - } - }, - "end": "(?=})", - "name": "meta.namespace.body.cs", - "patterns": [ - { - "include": "#using" - }, - { - "include": "#namespace" - }, - { - "include": "#code" - } - ] - } - ] - }, - "field-declaration": { - "patterns": [ - { - "begin": "(?=(?:(?:(?:private|public|volatile|internal|protected|static|readonly|const|event)\\s*)*)(?:[\\w\\s,<>\\[\\]]+?)(?:[\\w]+)\\s*(?:;|=|=>))", - "end": "(?=;)", - "patterns": [ - { - "match": "^\\s*((?:(?:private|public|volatile|internal|protected|static|readonly|const|event)\\s*)*)\\s*(.+?)\\s*([\\w]+)\\s*(?=;|=)", - "captures": { - "1" : { - "patterns": [ - { - "include": "#storage-modifiers" - } - ] - }, - "2" : { - "patterns": [ - { - "include": "#type" - } - ] - }, - "3": { - "name": "entity.name.variable.cs" - } - } - }, - { - "begin": "(?==>?)", - "end": "(?=;|$)", - "patterns": [ - { - "include": "#code" - } - ] - } - ] - } - ] - }, - "variable": { - "patterns": [ - { - "match": "^\\s*\\b(var)\\s+(.*?)(?=(=|;))", - "captures": { - "1": { - "name": "keyword.other.var.cs" - } - } - }, - { - "match": "^\\s*\\b(?!var|return|yield|throw)([\\w<>*?\\[\\]]+)\\s+([\\w]+)\\s*(?=(=(?!=)|;))", - 
"captures": { - "1": { - "name": "storage.type.variable.cs" - } - } - } - ] - }, - "block": { - "patterns": [ - { - "begin": "{", - "beginCaptures": { - "0": { - "name": "punctuation.section.block.begin.cs" - } - }, - "end": "}", - "endCaptures": { - "0": { - "name": "punctuation.section.block.end.cs" - } - }, - "name": "meta.block.cs", - "patterns": [ - { - "include": "#code" - } - ] - } - ] - }, - "builtinTypes": { - "patterns": [ - { - "match": "\\b(bool|byte|sbyte|char|decimal|double|float|int|uint|long|ulong|object|short|ushort|string|void|class|struct|enum|interface)\\b", - "name": "storage.type.cs" - } - ] - }, - "type": { - "patterns": [ - { - "match": "([\\w\\.]+\\s*<(?:[\\w\\s,\\.`\\[\\]\\*]+)+>(?:\\s*\\[\\s*\\])?)", - "comment": "generic type", - "captures": { - "1": { - "name": "storage.type.cs" - } - } - }, - { - "match": "\\b([a-zA-Z]+[\\w\\.]*\\b(?:\\s*\\[\\s*\\])?\\*?)", - "comment": "non-generic type", - "captures": { - "1": { - "name": "storage.type.cs" - } - } - } - ] - }, - "generic-constraints": { - "begin": "(where)\\s+(\\w+)\\s*:", - "end": "(?=where|{|$)", - "beginCaptures": { - "1": { - "name": "keyword.other.cs" - }, - "2": { - "name": "storage.type.cs" - } - }, - "patterns": [ - { - "match": "\\b(class|struct)\\b", - "name": "keyword.other.cs" - }, - { - "match": "(new)\\s*\\(\\s*\\)", - "captures": { - "1": { - "name": "keyword.other.cs" - } - } - }, - { - "include": "#type" - }, - { - "include": "#generic-constraints" - } - ] - }, - "type-declaration": { - "begin": "(?=\\w?[\\w\\s]*[^@]?(?:class|struct|interface|enum)\\s+\\w+)", - "end": "}", - "endCaptures": { - "0": { - "name": "punctuation.section.class.end.cs" - } - }, - "name": "meta.class.cs", - "patterns": [ - { - "include": "#storage-modifiers" - }, - { - "include": "#comments" - }, - { - "begin": "(class|struct|interface|enum)\\s+", - "end": "(?={|:|$|where)", - "name": "meta.class.identifier.cs", - "beginCaptures": { - "1": { - "name": "storage.modifier.cs" - } - }, - 
"patterns": [ - { - "include": "#type" - } - ] - }, - { - "begin": ":", - "end": "(?={|where)", - "patterns": [ - { - "include": "#type" - }, - { - "match": "([\\w<>]+)\\s*", - "captures": { - "1": { - "name": "storage.type.cs" - } - } - } - ] - }, - { - "include": "#generic-constraints" - }, - { - "begin": "{", - "beginCaptures": { - "0": { - "name": "punctuation.section.class.begin.cs" - } - }, - "end": "(?=})", - "name": "meta.class.body.cs", - "patterns": [ - { - "include": "#type-body" - } - ] - } - ] - }, - "type-body": { - "patterns": [ - { - "include": "type-declaration" - }, - { - "include": "#field-declaration" - }, - { - "include": "#property-declaration" - }, - { - "include": "#method" - }, - { - "include": "#storage-modifiers" - }, - { - "include": "#code" - } - ] - }, - "code": { - "patterns": [ - { - "include": "#block" - }, - { - "include": "#comments" - }, - { - "include": "#type-declaration" - }, - { - "include": "#variable" - }, - { - "include": "#constants" - }, - { - "include": "#storage-modifiers" - }, - { - "include": "#keywords" - }, - { - "include": "#preprocessor" - }, - { - "include": "#method-call" - }, - { - "include": "#builtinTypes" - }, - { - "include": "#documentation" - } - ] - }, - "comments": { - "patterns": [ - { - "begin": "///", - "captures": { - "0": { - "name": "punctuation.definition.comment.cs" - } - }, - "end": "$\\n?", - "name": "comment.block.documentation.cs", - "patterns": [ - { - "include": "text.xml" - } - ] - }, - { - "begin": "/\\*", - "captures": { - "0": { - "name": "punctuation.definition.comment.cs" - } - }, - "end": "\\*/\\n?", - "name": "comment.block.cs" - }, - { - "begin": "//", - "captures": { - "1": { - "name": "punctuation.definition.comment.cs" - } - }, - "end": "$\\n?", - "name": "comment.line.double-slash.cs" - } - ] - }, - "string-interpolated": { - "patterns": [ - { - "begin": "\\$\"", - "end": "\"|$", - "beginCaptures": { - "0": { - "name": "punctuation.definition.string.begin.cs" - } - }, - 
"endCaptures": { - "0": { - "name": "punctuation.definition.string.end.cs" - } - }, - "patterns": [ - { - "begin": "([^{}]+?)(?={|\"|$)", - "end": "(?={|\"|$)", - "beginCaptures": { - "1": { - "name": "string.quoted.double.cs" - } - } - }, - { - "begin": "{", - "end": "}", - "name": "meta.interpolated.expression.cs", - "patterns":[ - { - "include":"#code" - } - ] - }, - { - "begin": "([^{}]+?)(?={|\"|$)", - "end": "(?={|\"|$)", - "beginCaptures": { - "1": { - "name": "string.quoted.double.cs" - } - } - } - ] - } - ] - }, - "string-interpolated-verbatim": { - "patterns": [ - { - "begin": "\\$@\"", - "end": "\"", - "beginCaptures": { - "0": { - "name": "punctuation.definition.string.begin.cs" - } - }, - "endCaptures": { - "0": { - "name": "punctuation.definition.string.end.cs" - } - }, - "patterns": [ - { - "begin": "([^{}]+?)(?={|\"|$)", - "end": "(?={|\"|$)", - "beginCaptures": { - "1": { - "name": "string.quoted.double.literal.cs" - } - } - }, - { - "begin": "{", - "end": "}", - "name": "meta.interpolated.expression.cs", - "patterns":[ - { - "include":"#code" - } - ] - }, - { - "begin": "([^{}]+?)(?={|\"|$)", - "end": "(?={|\"|$)", - "beginCaptures": { - "1": { - "name": "string.quoted.double.literal.cs" - } - } - } - ] - } - ] - }, - "string": { - "patterns":[ - { - "include": "#string-interpolated-verbatim" - }, - { - "include": "#string-interpolated" - }, - { - "captures": { - "0": { - "name": "punctuation.definition.string.begin.cs" - } - }, - "match": "@\"([^\"]|\"\")*\"", - "name": "string.quoted.double.literal.cs" - }, - { - "begin": "\"", - "beginCaptures": { - "0": { - "name": "punctuation.definition.string.begin.cs" - } - }, - "end": "\"", - "endCaptures": { - "0": { - "name": "punctuation.definition.string.end.cs" - } - }, - "name": "string.quoted.double.cs", - "patterns": [ - { - "match": "\\\\.", - "name": "constant.character.escape.cs" - } - ] - }, - { - "begin": "'", - "beginCaptures": { - "0": { - "name": "punctuation.definition.string.begin.cs" - 
} - }, - "end": "'", - "endCaptures": { - "0": { - "name": "punctuation.definition.string.end.cs" - } - }, - "name": "string.quoted.single.cs", - "patterns": [ - { - "match": "\\\\.", - "name": "constant.character.escape.cs" - } - ] - } - ] - }, - "constants": { - "patterns": [ - { - "match": "\\b(true|false|null|this|base)\\b", - "name": "constant.language.cs" - }, - { - "match": "\\b((0(x|X)[0-9a-fA-F]*)|(([0-9]+\\.?[0-9]*)|(\\.[0-9]+))((e|E)(\\+|-)?[0-9]+)?)(L|l|UL|ul|u|U|F|f|ll|LL|ull|ULL)?\\b", - "name": "constant.numeric.cs" - }, - { - "include": "#string" - } - ] - }, - "keywords": { - "patterns": [ - { - "match": "\\b(if|else|while|for|foreach|in|do|return|continue|break|switch|case|default|goto|throw|try|catch|finally|lock|yield|await|when)\\b", - "name": "keyword.control.cs" - }, - { - "match": "\\b(from|where|select|group|into|orderby|join|let|on|equals|by|ascending|descending)\\b", - "name": "keyword.linq.cs" - }, - { - "match": "\\b(event|delegate|fixed|add|remove|set|get|value|new|is|as|using|checked|unchecked|typeof|sizeof|stackalloc|nameof)\\b", - "name": "keyword.other.cs" - }, - { - "match": "[@]\\b(namespace|class|var|event|delegate|add|remove|set|get|value|new|is|as|using|checked|unchecked|typeof|sizeof|nameof|when|override|readonly|stackalloc|from|where|select|group|into|orderby|join|let|on|equals|by|ascending|descending|if|else|while|for|foreach|in|do|return|continue|break|switch|case|default|goto|throw|try|catch|finally|lock|yield|await|internal|public|protected|private|static|const|sealed|abstract|virtual|extern|unsafe|volatile|implicit|explicit|operator|async|partial|bool|byte|sbyte|char|decimal|double|float|int|uint|long|ulong|object|short|ushort|string|void|struct|enum|interface)\\b", - "name": "meta.class.body.cs" - } - ] - }, - "attribute": { - "begin": "\\[", - "end": "\\]", - "name": "meta.method.annotation.cs", - "patterns": [ - { - "include": "#constants" - }, - { - "include": "#preprocessor" - }, - { - "include": "#builtinTypes" - 
} - ] - }, - "property-declaration": { - "begin": "^\\s*(?!.*\\b(?:class|interface|struct)\\b)((?:\\w+\\s+)*?)(?!(?:private|public|internal|protected|static|new|virtual|override))(\\w.+?)\\s+(\\w+)\\s*(?={|$)", - "end": "}|;|$", - "beginCaptures": { - "1" : { - "patterns": [ - { - "include": "#storage-modifiers" - } - ] - }, - "2" : { - "patterns": [ - { - "include": "#type" - } - ] - }, - "3": { - "name": "entity.name.function.cs" - } - }, - "name": "meta.property.cs", - "patterns": [ - { - "include": "#block" - }, - { - "begin": "=", - "end": "(?=;)", - "patterns":[ - { - "include": "#code" - } - ] - } - ] - }, - "method": { - "patterns": [ - { - "include": "attribute" - }, - { - "begin": "(?=\\bnew\\s+)(?=[\\w<].*\\s+)(?=[^=]+\\()", - "end": "(?={|;)", - "name": "meta.new-object.cs", - "patterns": [ - { - "include": "#code" - } - ] - }, - { - "begin": "(?\\s,`?]*>)?)\\s*\\(", - "beginCaptures": { - "1": { - "name": "entity.name.function.cs" - } - }, - "end": "\\)", - "name": "meta.method.identifier.cs", - "patterns": [ - { - "include": "#parameters" - }, - { - "include": "#constants" - } - ] - }, - { - "begin": "(?=\\w.*\\s+[\\w.]+\\s*\\()", - "end": "(?=[\\w.]+\\s*\\()", - "name": "meta.method.return-type.cs", - "patterns": [ - { - "include": "#builtinTypes" - } - ] - }, - { - "begin": ":\\s*(this|base)\\s*\\(", - "beginCaptures": { - "1": { - "name": "constant.language.cs" - } - }, - "end": "\\)", - "name": "meta.method.base-call.cs", - "patterns": [ - { - "include": "#builtinTypes" - } - ] - }, - { - "begin": "=>", - "beginCaptures": { - "0": { - "name": "punctuation.section.method.begin.cs" - } - }, - "end": "(?=;)", - "name": "meta.method.body.cs", - "patterns": [ - { - "include": "#code" - } - ] - }, - { - "begin": "{", - "beginCaptures": { - "0": { - "name": "punctuation.section.method.begin.cs" - } - }, - "end": "(?=})", - "name": "meta.method.body.cs", - "patterns": [ - { - "include": "#code" - } - ] - } - ] - } - ] - }, - "method-call": { - "begin": 
"([\\w$]+)\\s*(\\()", - "beginCaptures": { - "1": { - "name": "meta.method.cs" - }, - "2": { - "name": "punctuation.definition.method-parameters.begin.cs" - } - }, - "end": "\\)", - "endCaptures": { - "0": { - "name": "punctuation.definition.method-parameters.end.cs" - } - }, - "name": "meta.method-call.cs", - "patterns": [ - { - "match": ",", - "name": "punctuation.definition.seperator.parameter.cs" - }, - { - "include": "#code" - } - ] - }, - "parameters": { - "begin": "\\b(ref|params|out)?\\s*\\b(\\w+(?:\\s*<.*?>)?(?:\\s*\\*)*(?:\\s*\\?)?(?:\\s*\\[.*?\\])?)\\s+(@?\\w+)\\s*(=)?", - "beginCaptures": { - "1": { - "name": "storage.type.modifier.cs" - }, - "2": { - "name": "storage.type.generic.cs" - }, - "3": { - "name": "variable.parameter.function.cs" - }, - "4": { - "name": "keyword.operator.assignment.cs" - } - }, - "end": "(?:(,)|(?=[\\)]))", - "endCaptures": { - "1": { - "name": "punctuation.definition.separator.parameter.cs" - } - }, - "patterns": [ - { - "include": "#constants" - }, - { - "include": "#block" - } - ] - }, - "preprocessor": { - "patterns": [ - { - "captures": { - "2": { - "name": "entity.name.function.preprocessor.cs" - } - }, - "match": "^\\s*#\\s*(if|else|elif|endif|define|undef|warning|error|line|pragma|region|endregion)\\b\\s*(.*?)(?=$|\\/\\/)", - "name": "meta.preprocessor.cs" - } - ] - }, - "storage-modifiers": { - "match": "\\b(event|delegate|internal|public|protected|private|static|const|new|sealed|abstract|virtual|override|extern|unsafe|readonly|volatile|implicit|explicit|operator|async|partial)\\b", - "name": "storage.modifier.cs" - } - } -} From 93b3b5460078eadcfae69c5b7ba348ba0e609e39 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Tue, 10 Jan 2017 15:47:26 -0800 Subject: [PATCH 141/192] Remove third-party notice for old grammar --- ThirdPartyNotices.txt | 40 ---------------------------------------- 1 file changed, 40 deletions(-) diff --git a/ThirdPartyNotices.txt b/ThirdPartyNotices.txt index 18eea98849..f4d3c8d54b 100644 
--- a/ThirdPartyNotices.txt +++ b/ThirdPartyNotices.txt @@ -12,7 +12,6 @@ expressly granted, whether by implication, estoppel or otherwise. 3. run-in-terminal version 0.0.2 (https://github.com/microsoft/run-in-terminal) 4. semver version 5.1.0 (https://github.com/npm/node-semver) 5. DefinitelyTyped version 0.0.1 (https://github.com/borisyankov/DefinitelyTyped) -6. language-csharp version 0.11.0 (https://github.com/atom/language-csharp) %% omnisharp-roslyn NOTICES AND INFORMATION BEGINS HERE ============================================================ @@ -123,42 +122,3 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ============================================================ END OF DefinitelyTyped NOTICES AND INFORMATION - -%% language-csharp NOTICES AND INFORMATION BEGIN HERE -========================================= -Copyright (c) 2014 GitHub Inc. - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
- - -This package was derived from a TextMate bundle located at -https://github.com/wintermi/csharp-tmbundle by Matthew Winter @wintermi and -Adam Lickel @lickel and distributed under the following license, located in -`README.markdown`: - -This bundle is dual-licensed under MIT and GPL licenses. - - - http://www.opensource.org/licenses/mit-license.php - - http://www.gnu.org/licenses/gpl.html - -Use it, change it, fork it, sell it. Do what you will, but please leave the -author attribution. -========================================= -END OF language-csharp NOTICES AND INFORMATION From 8e868b6d77c9e92d3cfc8d5681d6c5c7cb262b29 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Tue, 10 Jan 2017 16:25:20 -0800 Subject: [PATCH 142/192] Clean up syntax.md --- syntaxes/syntax.md | 134 --------------------------------------------- 1 file changed, 134 deletions(-) diff --git a/syntaxes/syntax.md b/syntaxes/syntax.md index c282c6d541..bda6f8fb3a 100644 --- a/syntaxes/syntax.md +++ b/syntaxes/syntax.md @@ -23,137 +23,3 @@ (?\s*\((?:[^\(\)]|\g)+\)) ) ``` - -#### Dotted name - -* Expression: `([_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)` -* Matches: `System.Collections.Generic.Dictionary` - -#### Simple generic name - -* Expression: `(?[_$[:alpha:]][_$[:alnum:]]*)(?:\s*<\s*\g(?:\s*,\s*\g)*\s*>\s*)?` -* Matches: `C` - -#### Generic name - -* Expression: `(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?)` -* Matches: `System.Collections.Generic.Dictionary>` - -#### Array suffix - -* Expression: `(?:(?:\[,*\])*)` -* Matches: `[][,][,,]` - -#### Pointer suffix - -* Expression: `(?:(?:\*)*)?` -* Matches: `int*` - -#### Type name - -* Expression: 
`(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)` -* Matches: `System.Collections.Generic.Dictionary, System.List>>` - -#### Delegate declarations - -* Expression: `(?=(?(?:(?:new|public|protected|internal|private)\s+)*)(?(?:\b(?:delegate)))\s+(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)\s+(?(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*<\s*(?:(?:(?:in|out)\s+)?[_$[:alpha:]][_$[:alnum:]]*)(?:,\s*(?:(?:in|out)\s+)?[_$[:alpha:]][_$[:alnum:]]*)*\s*>\s*)?))\s*(?:\())` -* Matches: `delegate (int, int) Foo();` - -#### Field declaratiosn - -Note that fields can have multiple declarators with initializers. Our strategy is to match up to the end of the field name. -Further field names are matched by looking for identifiers, #punctuation-comma, and #variable-initializer. 
- -* Expression: `(?=(?(?:(?:new|public|protected|internal|private|static|readonly|volatile|const)\s+)*)\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)\s+(?[_$[:alpha:]][_$[:alnum:]]*)\s*(?!=>|==)(?:;|=))` -* Break down: - * Storage modifiers: `(?(?:(?:new|public|protected|internal|private|static|readonly|volatile|const)\s+)*)` - * Type name: `\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)` - * First field name: `\s+(?[_$[:alpha:]][_$[:alnum:]]*)*)` - * End: `\s*(?!=>)(?:;|=)` - -#### Event declarations - -* Expression: 
`(?=(?(?:\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\b\s*)*)\s*\b(?event)\b\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)(\s*\.\s*))?(?[_$[:alpha:]][_$[:alnum:]]*(?:\s*,\s*[_$[:alpha:]][_$[:alnum:]]*)*)\s*(?:\{|;|$))` -* Break down: - * Storage modifiers: `(?(?:\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\b\s*)*)` - * Event keyword: `\s*\b(?event)\b` - * Type name: `\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)` - * Interface name: `\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)(\s*\.\s*))?` - * Event name(s): `(?[_$[:alpha:]][_$[:alnum:]]*(?:\s*,\s*[_$[:alpha:]][_$[:alnum:]]*)*)` - * End: `\s*(?:\{|;|$)` - -#### Property declarations - -Note that properties can easily 
match other declarations unintentially. For example, "public class C {" looks a lot like the start of a property -if you consider that regular expressions don't know that "class" is a keyword. To handle this situation, we must use look ahead. - -* Expression: `(?!.*\b(?:class|interface|struct|enum|event)\b)(?=(?(?:\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\b\s*)*)\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)(\s*\.\s*))?(?[_$[:alpha:]][_$[:alnum:]]*)\s*(?:\{|=>|$))` -* Break down: - * Don't match other declarations! 
`(?!.*\b(?:class|interface|struct|enum|event)\b)` - * Storage modifiers: `(?(?:\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern)\b\s*)*)` - * Type name: `\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)` - * Interface name: `\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)(\s*\.\s*))?` - * Property name: `(?[_$[:alpha:]][_$[:alnum:]]*)` - * End: `\s*(?:\{|=>|$)` - -#### Indexer declarations - -* Expression: `(?=(?(?:\b(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\b\s*)*)\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)(\s*\.\s*))?(?this)\s*(?:\[))` -* Break down: - * Storage modifiers: `(?(?:\b(?:new|public|protected|internal|private|virtual|sealed|override|abstract|extern)\b\s*)*)` - * Type 
name: `\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)` - * Interface name: `\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)(\s*\.\s*))?` - * Indexer name: `(?this)` - * End: `\s*(?:\[)` - -#### Method declarations - -* Expression: `(?=(?(?:\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\b\s*)*)\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)(\s*\.\s*))?(?(?[_$[:alpha:]][_$[:alnum:]]*)(?:\s*<\s*\g(?:\s*,\s*\g)*\s*>\s*)?)\s*(?:\())` - -* Break down: - * Storage modifiers: `(?(?:\b(?:new|public|protected|internal|private|static|virtual|sealed|override|abstract|extern|async|partial)\b\s*)*)` - * Type name: 
`\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)` - * Interface name: `\s+(?:(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)(\s*\.\s*))?` - * Method name and type parameters: `(?(?[_$[:alpha:]][_$[:alnum:]]*)(?:\s*<\s*\g(?:\s*,\s*\g)*\s*>\s*)?)` - * End: `\s*(?:\()` - -#### Constructor declarations - -Note that the match for constructor declarations contains an `|`. This allows for constructors with and without storage modifiers. -If the storage modifiers are optional (i.e. using a `*` rather than a `+`), this match conflicts with fields where there is a modifier -followed by a tuple type (e.g. `private (int, int) x;`). - -* Expression: `(?=(?:(?(?:(?:public|protected|internal|private|extern|static)\s+)+)\s*(?:[_$[:alpha:]][_$[:alnum:]]*)|(?:[_$[:alpha:]][_$[:alnum:]]*))\s*(?:\())` -* Break down: - * Storage modifiers: `(?(?:(?:public|protected|internal|private|extern|static)\s+)*)` - * Name: `\s+[_$[:alpha:]][_$[:alnum:]]*` - * End: `\s*(?:\()` - -#### Destructor declarations - -Note that structs do not allow destructor declarations, but we'll try to highlight them anyway. 
- -* Expression: `(?=~(?:[_$[:alpha:]][_$[:alnum:]]*)\s*(?:\())` -* Break down: - * Name: `~(?:[_$[:alpha:]][_$[:alnum:]]*)` - * End: `\s*(?:\()` - -#### Operator declarations - -* Expression: `(?=(?(?:(?:public|static|extern)\s+)*)\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)\s*(?(?:\b(?:operator)))\s*(?(?:\+|-|\*|\/|%|&|\\||\^|\<\<|\>\>|==|!=|\>|\<|\>=|\<=|!|~|\+\+|--|true|false))\s*(?:\())` -* Break down: - * Storage modifiers: `(?(?:(?:public|static|extern)\s+)*)` - * Type name: `\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)` - * Operator keyword: `\s*(?(?:\b(?:operator)))` - * Operator: `\s*(?(?:\+|-|\*|\/|%|&|\\||\^|\<\<|\>\>|==|!=|\>|\<|\>=|\<=|!|~|\+\+|--|true|false))` - * End: `\s*(?:\()` - -#### Conversion operator declarations - -* Expression: `(?=(?(?:(?:public|static|extern)\s+)*)\s*(?(?:\b(?:explicit|implicit)))\s*(?(?:\b(?:operator)))\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)\s*(?:\())` -* Break down: - * Storage modifiers: `(?(?:(?:public|static|extern)\s+)*)` - * Explicit or implicit: `\s*(?(?:\b(?:explicit|implicit)))` - * Operator keyword: `\s*(?(?:\b(?:operator)))` - * Type 
name: `\s*(?(?:(?:[_$[:alpha:]][_$[:alnum:]]*\s*\:\:\s*)?(?:(?:[_$[:alpha:]][_$[:alnum:]]*(?:\s*\.\s*[_$[:alpha:]][_$[:alnum:]]*)*)(?:\s*<\s*(?:\g)(?:\s*,\s*\g)*\s*>\s*)?(?:(?:\*)*)?(?:(?:\[,*\])*)?(?:\s*\.\s*\g)*)|(?:\s*\(\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?(?:\s*,\s*(?:\g)(?:\s+[_$[:alpha:]][_$[:alnum:]]*)?)*\s*\)\s*))(?:(?:\[,*\])*)?)` - * End: `\s*(?:\()` From b0d0041f5d28d9023f53143ff89471dd497777c3 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Tue, 10 Jan 2017 16:27:16 -0800 Subject: [PATCH 143/192] Add plist output to source control --- .gitignore | 1 - syntaxes/csharp.tmLanguage | 6525 ++++++++++++++++++++++++++++++++++++ 2 files changed, 6525 insertions(+), 1 deletion(-) create mode 100644 syntaxes/csharp.tmLanguage diff --git a/.gitignore b/.gitignore index 5c2a0dfe6b..75442c2235 100644 --- a/.gitignore +++ b/.gitignore @@ -6,6 +6,5 @@ out .vscode-test install.* -syntaxes/csharp.tmLanguage *.vsix diff --git a/syntaxes/csharp.tmLanguage b/syntaxes/csharp.tmLanguage new file mode 100644 index 0000000000..e56e7aeb64 --- /dev/null +++ b/syntaxes/csharp.tmLanguage @@ -0,0 +1,6525 @@ + + + + + name + C# + scopeName + source.cs + fileTypes + + cs + + uuid + f7de61e2-bdde-4e2a-a139-8221b179584e + patterns + + + include + #preprocessor + + + include + #comment + + + include + #directives + + + include + #declarations + + + include + #script-top-level + + + repository + + directives + + patterns + + + include + #extern-alias-directive + + + include + #using-directive + + + include + #attribute-section + + + include + #punctuation-semicolon + + + + declarations + + patterns + + + include + #namespace-declaration + + + include + #type-declarations + + + include + #punctuation-semicolon + + + + script-top-level + + patterns + + + include + #method-declaration + + + include + #statement + + + include + #punctuation-semicolon + + + + type-declarations + + patterns + + + include + #preprocessor + + + include + #comment + + + include + #storage-modifier + + 
+ include + #class-declaration + + + include + #delegate-declaration + + + include + #enum-declaration + + + include + #interface-declaration + + + include + #struct-declaration + + + include + #attribute-section + + + include + #punctuation-semicolon + + + + class-members + + patterns + + + include + #preprocessor + + + include + #comment + + + include + #storage-modifier + + + include + #type-declarations + + + include + #event-declaration + + + include + #property-declaration + + + include + #indexer-declaration + + + include + #field-declaration + + + include + #variable-initializer + + + include + #constructor-declaration + + + include + #destructor-declaration + + + include + #operator-declaration + + + include + #conversion-operator-declaration + + + include + #method-declaration + + + include + #attribute-section + + + include + #punctuation-semicolon + + + + struct-members + + patterns + + + include + #preprocessor + + + include + #comment + + + include + #storage-modifier + + + include + #type-declarations + + + include + #event-declaration + + + include + #property-declaration + + + include + #indexer-declaration + + + include + #field-declaration + + + include + #variable-initializer + + + include + #constructor-declaration + + + include + #destructor-declaration + + + include + #operator-declaration + + + include + #conversion-operator-declaration + + + include + #method-declaration + + + include + #attribute-section + + + include + #punctuation-semicolon + + + + interface-members + + patterns + + + include + #preprocessor + + + include + #comment + + + include + #event-declaration + + + include + #property-declaration + + + include + #indexer-declaration + + + include + #method-declaration + + + include + #attribute-section + + + include + #punctuation-semicolon + + + + statement + + patterns + + + include + #preprocessor + + + include + #comment + + + include + #while-statement + + + include + #do-statement + + + include + #for-statement + + + 
include + #foreach-statement + + + include + #if-statement + + + include + #else-part + + + include + #switch-statement + + + include + #goto-statement + + + include + #return-statement + + + include + #break-or-continue-statement + + + include + #throw-statement + + + include + #yield-statement + + + include + #try-statement + + + include + #checked-unchecked-statement + + + include + #lock-statement + + + include + #using-statement + + + include + #labeled-statement + + + include + #local-declaration + + + include + #block + + + include + #expression + + + include + #punctuation-semicolon + + + + expression + + patterns + + + include + #preprocessor + + + include + #comment + + + include + #checked-unchecked-expression + + + include + #typeof-or-default-expression + + + include + #nameof-expression + + + include + #interpolated-string + + + include + #verbatim-interpolated-string + + + include + #literal + + + include + #this-or-base-expression + + + include + #conditional-operator + + + include + #expression-operators + + + include + #await-expression + + + include + #query-expression + + + include + #as-expression + + + include + #is-expression + + + include + #anonymous-method-expression + + + include + #object-creation-expression + + + include + #array-creation-expression + + + include + #anonymous-object-creation-expression + + + include + #member-access-expression + + + include + #invocation-expression + + + include + #element-access-expression + + + include + #cast-expression + + + include + #parenthesized-expression + + + include + #initializer-expression + + + include + #identifier + + + + extern-alias-directive + + begin + \s*(extern)\b\s*(alias)\b\s*([_[:alpha:]][_[:alnum:]]*) + beginCaptures + + 1 + + name + keyword.other.extern.cs + + 2 + + name + keyword.other.alias.cs + + 3 + + name + variable.other.alias.cs + + + end + (?=;) + + using-directive + + patterns + + + begin + \b(using)\b\s+(static)\s+ + beginCaptures + + 1 + + name + 
keyword.other.using.cs + + 2 + + name + keyword.other.static.cs + + + end + (?=;) + patterns + + + include + #type + + + + + begin + \b(using)\s+(?=([_[:alpha:]][_[:alnum:]]*)\s*=) + beginCaptures + + 1 + + name + keyword.other.using.cs + + 2 + + name + entity.name.type.alias.cs + + + end + (?=;) + patterns + + + include + #comment + + + include + #type + + + include + #operator-assignment + + + + + begin + \b(using)\s* + beginCaptures + + 1 + + name + keyword.other.using.cs + + + end + (?=;) + patterns + + + include + #comment + + + name + entity.name.type.namespace.cs + match + [_[:alpha:]][_[:alnum:]]* + + + include + #operator-assignment + + + + + + attribute-section + + begin + (\[)(assembly|module|field|event|method|param|property|return|type)?(\:)? + beginCaptures + + 1 + + name + punctuation.squarebracket.open.cs + + 2 + + name + keyword.other.attribute-specifier.cs + + 3 + + name + punctuation.separator.colon.cs + + + end + (\]) + endCaptures + + 1 + + name + punctuation.squarebracket.close.cs + + + patterns + + + include + #comment + + + include + #attribute + + + include + #punctuation-comma + + + + attribute + + patterns + + + include + #type-name + + + include + #attribute-arguments + + + + attribute-arguments + + begin + (\() + beginCaptures + + 1 + + name + punctuation.parenthesis.open.cs + + + end + (\)) + endCaptures + + 1 + + name + punctuation.parenthesis.close.cs + + + patterns + + + include + #attribute-named-argument + + + include + #expression + + + include + #punctuation-comma + + + + attribute-named-argument + + begin + ([_[:alpha:]][_[:alnum:]]*)\s*(?==) + beginCaptures + + 1 + + name + entity.name.variable.property.cs + + + end + (?=(,|\))) + patterns + + + include + #operator-assignment + + + include + #expression + + + + namespace-declaration + + begin + \b(namespace)\s+ + beginCaptures + + 1 + + name + keyword.other.namespace.cs + + + end + (?<=\}) + patterns + + + include + #comment + + + name + entity.name.type.namespace.cs + match + 
[_[:alpha:]][_[:alnum:]]* + + + include + #punctuation-accessor + + + begin + \{ + beginCaptures + + 0 + + name + punctuation.curlybrace.open.cs + + + end + \} + endCaptures + + 0 + + name + punctuation.curlybrace.close.cs + + + patterns + + + include + #declarations + + + include + #using-directive + + + include + #punctuation-semicolon + + + + + + storage-modifier + + name + storage.modifier.cs + match + (?<!\.)\b(new|public|protected|internal|private|abstract|virtual|override|sealed|static|partial|readonly|volatile|const|extern|async|unsafe)\b + + class-declaration + + begin + (?=\bclass\b) + end + (?<=\}) + patterns + + + begin + (?x) +\b(class)\b\s+ +([_[:alpha:]][_[:alnum:]]*)\s* + beginCaptures + + 1 + + name + keyword.other.class.cs + + 2 + + name + entity.name.type.class.cs + + + end + (?=\{) + patterns + + + include + #type-parameter-list + + + include + #base-types + + + include + #generic-constraints + + + + + begin + \{ + beginCaptures + + 0 + + name + punctuation.curlybrace.open.cs + + + end + \} + endCaptures + + 0 + + name + punctuation.curlybrace.close.cs + + + patterns + + + include + #class-members + + + + + include + #preprocessor + + + include + #comment + + + + delegate-declaration + + begin + (?x) +(?:\b(delegate)\b)\s+ +(?<type-name> + (?: + (?:(?<identifier>[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification + (?<name-and-type-args> # identifier + type arguments (if any) + \g<identifier>\s* + (?<type-args>\s*<(?:[^<>]|\g<type-args>)+>\s*)? + ) + (?:\s*\.\s*\g<name-and-type-args>)* # Are there any more names being dotted into? + (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? + (?:\s*\[(?:\s*,\s*)*\]\s*)* # array suffix? 
+ )| + (?<tuple>\s*\((?:[^\(\)]|\g<tuple>)+\)) +)\s+ +(\g<identifier>)\s* +(<([^<>]+)>)?\s* +(?=\() + beginCaptures + + 1 + + name + keyword.other.delegate.cs + + 2 + + patterns + + + include + #type + + + + 7 + + name + entity.name.type.delegate.cs + + 8 + + patterns + + + include + #type-parameter-list + + + + + end + (?=;) + patterns + + + include + #comment + + + include + #parenthesized-parameter-list + + + include + #generic-constraints + + + + enum-declaration + + begin + (?=\benum\b) + end + (?<=\}) + patterns + + + begin + (?=enum) + end + (?=\{) + patterns + + + match + (enum)\s+([_[:alpha:]][_[:alnum:]]*) + captures + + 1 + + name + keyword.other.enum.cs + + 2 + + name + entity.name.type.enum.cs + + + + + begin + : + beginCaptures + + 0 + + name + punctuation.separator.colon.cs + + + end + (?=\{) + patterns + + + include + #type + + + + + + + begin + \{ + beginCaptures + + 0 + + name + punctuation.curlybrace.open.cs + + + end + \} + endCaptures + + 0 + + name + punctuation.curlybrace.close.cs + + + patterns + + + include + #preprocessor + + + include + #comment + + + include + #attribute-section + + + include + #punctuation-comma + + + begin + [_[:alpha:]][_[:alnum:]]* + beginCaptures + + 0 + + name + entity.name.variable.enum-member.cs + + + end + (?=(,|\})) + patterns + + + include + #comment + + + include + #variable-initializer + + + + + + + include + #preprocessor + + + include + #comment + + + + interface-declaration + + begin + (?=\binterface\b) + end + (?<=\}) + patterns + + + begin + (?x) +(interface)\b\s+ +([_[:alpha:]][_[:alnum:]]*) + beginCaptures + + 1 + + name + keyword.other.interface.cs + + 2 + + name + entity.name.type.interface.cs + + + end + (?=\{) + patterns + + + include + #type-parameter-list + + + include + #base-types + + + include + #generic-constraints + + + + + begin + \{ + beginCaptures + + 0 + + name + punctuation.curlybrace.open.cs + + + end + \} + endCaptures + + 0 + + name + punctuation.curlybrace.close.cs + + + patterns + 
+ + include + #interface-members + + + + + include + #preprocessor + + + include + #comment + + + + struct-declaration + + begin + (?=\bstruct\b) + end + (?<=\}) + patterns + + + begin + (?x) +(struct)\b\s+ +([_[:alpha:]][_[:alnum:]]*) + beginCaptures + + 1 + + name + keyword.other.struct.cs + + 2 + + name + entity.name.type.struct.cs + + + end + (?=\{) + patterns + + + include + #type-parameter-list + + + include + #base-types + + + include + #generic-constraints + + + + + begin + \{ + beginCaptures + + 0 + + name + punctuation.curlybrace.open.cs + + + end + \} + endCaptures + + 0 + + name + punctuation.curlybrace.close.cs + + + patterns + + + include + #struct-members + + + + + include + #preprocessor + + + include + #comment + + + + type-parameter-list + + begin + \< + beginCaptures + + 0 + + name + punctuation.definition.typeparameters.begin.cs + + + end + \> + endCaptures + + 0 + + name + punctuation.definition.typeparameters.end.cs + + + patterns + + + match + \b(in|out)\b + captures + + 1 + + name + storage.modifier.cs + + + + + match + \b([_[:alpha:]][_[:alnum:]]*)\b + captures + + 1 + + name + entity.name.type.type-parameter.cs + + + + + include + #comment + + + include + #punctuation-comma + + + include + #attribute-section + + + + base-types + + begin + : + beginCaptures + + 0 + + name + punctuation.separator.colon.cs + + + end + (?=\{|where) + patterns + + + include + #type + + + include + #punctuation-comma + + + + generic-constraints + + begin + (where)\s+([_[:alpha:]][_[:alnum:]]*)\s*(:) + beginCaptures + + 1 + + name + keyword.other.where.cs + + 2 + + name + storage.type.cs + + 3 + + name + punctuation.separator.colon.cs + + + end + (?=\{|where|;) + patterns + + + name + keyword.other.class.cs + match + \bclass\b + + + name + keyword.other.struct.cs + match + \bstruct\b + + + match + (new)\s*(\()\s*(\)) + captures + + 1 + + name + keyword.other.new.cs + + 2 + + name + punctuation.parenthesis.open.cs + + 3 + + name + punctuation.parenthesis.close.cs 
+ + + + + include + #type + + + include + #punctuation-comma + + + include + #generic-constraints + + + + field-declaration + + begin + (?x) +(?<type-name> + (?: + (?:(?<identifier>[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification + (?<name-and-type-args> # identifier + type arguments (if any) + \g<identifier>\s* + (?<type-args>\s*<(?:[^<>]|\g<type-args>)+>\s*)? + ) + (?:\s*\.\s*\g<name-and-type-args>)* # Are there any more names being dotted into? + (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? + (?:\s*\[(?:\s*,\s*)*\]\s*)* # array suffix? + )| + (?<tuple>\s*\((?:[^\(\)]|\g<tuple>)+\)) +)\s+ +(\g<identifier>)\s* # first field name +(?!=>|==)(?=,|;|=) + beginCaptures + + 1 + + patterns + + + include + #type + + + + 6 + + name + entity.name.variable.field.cs + + + end + (?=;) + patterns + + + name + entity.name.variable.field.cs + match + [_[:alpha:]][_[:alnum:]]* + + + include + #punctuation-comma + + + include + #comment + + + include + #variable-initializer + + + + property-declaration + + begin + (?x) +(?!.*\b(?:class|interface|struct|enum|event)\b)\s* +(?<return-type> + (?<type-name> + (?: + (?:(?<identifier>[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification + (?<name-and-type-args> # identifier + type arguments (if any) + \g<identifier>\s* + (?<type-args>\s*<(?:[^<>]|\g<type-args>)+>\s*)? + ) + (?:\s*\.\s*\g<name-and-type-args>)* # Are there any more names being dotted into? + (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? + (?:\s*\[(?:\s*,\s*)*\]\s*)* # array suffix? + )| + (?<tuple>\s*\((?:[^\(\)]|\g<tuple>)+\)) + )\s+ +) +(?<interface-name>\g<type-name>\s*\.\s*)? 
+(?<property-name>\g<identifier>)\s* +(?=\{|=>|$) + beginCaptures + + 1 + + patterns + + + include + #type + + + + 7 + + patterns + + + include + #type + + + include + #punctuation-accessor + + + + 8 + + name + entity.name.variable.property.cs + + + end + (?<=\})|(?=;) + patterns + + + include + #comment + + + include + #property-accessors + + + include + #expression-body + + + include + #variable-initializer + + + + indexer-declaration + + begin + (?x) +(?<return-type> + (?<type-name> + (?: + (?:(?<identifier>[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification + (?<name-and-type-args> # identifier + type arguments (if any) + \g<identifier>\s* + (?<type-args>\s*<(?:[^<>]|\g<type-args>)+>\s*)? + ) + (?:\s*\.\s*\g<name-and-type-args>)* # Are there any more names being dotted into? + (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? + (?:\s*\[(?:\s*,\s*)*\]\s*)* # array suffix? + )| + (?<tuple>\s*\((?:[^\(\)]|\g<tuple>)+\)) + )\s+ +) +(?<interface-name>\g<type-name>\s*\.\s*)? +(?<indexer-name>this)\s* +(?=\[) + beginCaptures + + 1 + + patterns + + + include + #type + + + + 7 + + patterns + + + include + #type + + + include + #punctuation-accessor + + + + 8 + + name + keyword.other.this.cs + + + end + (?<=\})|(?=;) + patterns + + + include + #comment + + + include + #bracketed-parameter-list + + + include + #property-accessors + + + include + #expression-body + + + include + #variable-initializer + + + + event-declaration + + begin + (?x) +\b(event)\b\s* +(?<return-type> + (?<type-name> + (?: + (?:(?<identifier>[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification + (?<name-and-type-args> # identifier + type arguments (if any) + \g<identifier>\s* + (?<type-args>\s*<(?:[^<>]|\g<type-args>)+>\s*)? + ) + (?:\s*\.\s*\g<name-and-type-args>)* # Are there any more names being dotted into? + (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? + (?:\s*\[(?:\s*,\s*)*\]\s*)* # array suffix? 
+ )| + (?<tuple>\s*\((?:[^\(\)]|\g<tuple>)+\)) + )\s+ +) +(?<interface-name>\g<type-name>\s*\.\s*)? +(?<event-names>\g<identifier>(?:\s*,\s*\g<identifier>)*)\s* +(?=\{|;|$) + beginCaptures + + 1 + + name + keyword.other.event.cs + + 2 + + patterns + + + include + #type + + + + 8 + + patterns + + + include + #type + + + include + #punctuation-accessor + + + + 9 + + patterns + + + name + entity.name.variable.event.cs + match + [_[:alpha:]][_[:alnum:]]* + + + include + #punctuation-comma + + + + + end + (?<=\})|(?=;) + patterns + + + include + #comment + + + include + #event-accessors + + + include + #punctuation-comma + + + + property-accessors + + begin + \{ + beginCaptures + + 0 + + name + punctuation.curlybrace.open.cs + + + end + \} + endCaptures + + 0 + + name + punctuation.curlybrace.close.cs + + + patterns + + + name + storage.modifier.cs + match + \b(private|protected|internal)\b + + + name + keyword.other.get.cs + match + \b(get)\b + + + name + keyword.other.set.cs + match + \b(set)\b + + + include + #attribute-section + + + include + #block + + + include + #punctuation-semicolon + + + + event-accessors + + begin + \{ + beginCaptures + + 0 + + name + punctuation.curlybrace.open.cs + + + end + \} + endCaptures + + 0 + + name + punctuation.curlybrace.close.cs + + + patterns + + + name + keyword.other.add.cs + match + \b(add)\b + + + name + keyword.other.remove.cs + match + \b(remove)\b + + + include + #attribute-section + + + include + #block + + + include + #punctuation-semicolon + + + + method-declaration + + begin + (?x) +(?<return-type> + (?<type-name> + (?: + (?:(?<identifier>[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification + (?<name-and-type-args> # identifier + type arguments (if any) + \g<identifier>\s* + (?<type-args>\s*<(?:[^<>]|\g<type-args>)+>\s*)? + ) + (?:\s*\.\s*\g<name-and-type-args>)* # Are there any more names being dotted into? + (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? 
+ (?:\s*\[(?:\s*,\s*)*\]\s*)* # array suffix? + )| + (?<tuple>\s*\((?:[^\(\)]|\g<tuple>)+\)) + )\s+ +) +(?<interface-name>\g<type-name>\s*\.\s*)? +(\g<identifier>)\s* +(<([^<>]+)>)?\s* +(?=\() + beginCaptures + + 1 + + patterns + + + include + #type + + + + 7 + + patterns + + + include + #type + + + include + #punctuation-accessor + + + + 8 + + name + entity.name.function.cs + + 9 + + patterns + + + include + #type-parameter-list + + + + + end + (?<=\})|(?=;) + patterns + + + include + #comment + + + include + #parenthesized-parameter-list + + + include + #generic-constraints + + + include + #expression-body + + + include + #block + + + + constructor-declaration + + begin + (?=[_[:alpha:]][_[:alnum:]]*\s*\() + end + (?<=\})|(?=;) + patterns + + + match + \b([_[:alpha:]][_[:alnum:]]*)\b + captures + + 1 + + name + entity.name.function.cs + + + + + begin + (:) + beginCaptures + + 1 + + name + punctuation.separator.colon.cs + + + end + (?=\{|=>) + patterns + + + include + #constructor-initializer + + + + + include + #parenthesized-parameter-list + + + include + #preprocessor + + + include + #comment + + + include + #expression-body + + + include + #block + + + + constructor-initializer + + begin + \b(?:(base)|(this))\b\s*(?=\() + beginCaptures + + 1 + + name + keyword.other.base.cs + + 2 + + name + keyword.other.this.cs + + + end + (?<=\)) + patterns + + + include + #argument-list + + + + destructor-declaration + + begin + (~)([_[:alpha:]][_[:alnum:]]*)\s*(?=\() + beginCaptures + + 1 + + name + punctuation.tilde.cs + + 2 + + name + entity.name.function.cs + + + end + (?<=\})|(?=;) + patterns + + + include + #comment + + + include + #parenthesized-parameter-list + + + include + #expression-body + + + include + #block + + + + operator-declaration + + begin + (?x) +(?<type-name> + (?: + (?:(?<identifier>[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? 
# alias-qualification + (?<name-and-type-args> # identifier + type arguments (if any) + \g<identifier>\s* + (?<type-args>\s*<(?:[^<>]|\g<type-args>)+>\s*)? + ) + (?:\s*\.\s*\g<name-and-type-args>)* # Are there any more names being dotted into? + (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? + (?:\s*\[(?:\s*,\s*)*\]\s*)* # array suffix? + )| + (?<tuple>\s*\((?:[^\(\)]|\g<tuple>)+\)) +)\s* +(?<operator-keyword>(?:\b(?:operator)))\s* +(?<operator>(?:\+|-|\*|/|%|&|\||\^|\<\<|\>\>|==|!=|\>|\<|\>=|\<=|!|~|\+\+|--|true|false))\s* +(?=\() + beginCaptures + + 1 + + patterns + + + include + #type + + + + 6 + + name + keyword.other.operator-decl.cs + + 7 + + name + entity.name.function.cs + + + end + (?<=\})|(?=;) + patterns + + + include + #comment + + + include + #parenthesized-parameter-list + + + include + #expression-body + + + include + #block + + + + conversion-operator-declaration + + begin + (?x) +(?<explicit-or-implicit-keyword>(?:\b(?:explicit|implicit)))\s* +(?<operator-keyword>(?:\b(?:operator)))\s* +(?<type-name> + (?: + (?:(?<identifier>[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification + (?<name-and-type-args> # identifier + type arguments (if any) + \g<identifier>\s* + (?<type-args>\s*<(?:[^<>]|\g<type-args>)+>\s*)? + ) + (?:\s*\.\s*\g<name-and-type-args>)* # Are there any more names being dotted into? + (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? + (?:\s*\[(?:\s*,\s*)*\]\s*)* # array suffix? 
+ )| + (?<tuple>\s*\((?:[^\(\)]|\g<tuple>)+\)) +)\s* +(?=\() + beginCaptures + + 1 + + patterns + + + match + \b(explicit)\b + captures + + 1 + + name + keyword.other.explicit.cs + + + + + match + \b(implicit)\b + captures + + 1 + + name + keyword.other.implicit.cs + + + + + + 2 + + name + keyword.other.operator-decl.cs + + 3 + + patterns + + + include + #type + + + + + end + (?<=\})|(?=;) + patterns + + + include + #comment + + + include + #parenthesized-parameter-list + + + include + #expression-body + + + include + #block + + + + block + + begin + \{ + beginCaptures + + 0 + + name + punctuation.curlybrace.open.cs + + + end + \} + endCaptures + + 0 + + name + punctuation.curlybrace.close.cs + + + patterns + + + include + #statement + + + + variable-initializer + + begin + (?<!=|!)(=)(?!=|>) + beginCaptures + + 1 + + name + keyword.operator.assignment.cs + + + end + (?=[,\);}]) + patterns + + + include + #expression + + + + expression-body + + begin + => + beginCaptures + + 0 + + name + keyword.operator.arrow.cs + + + end + (?=[,\);}]) + patterns + + + include + #expression + + + + goto-statement + + begin + (?<!\.)\b(goto)\b + beginCaptures + + 1 + + name + keyword.control.goto.cs + + + end + (?=;) + patterns + + + begin + \b(case)\b + beginCaptures + + 1 + + name + keyword.control.case.cs + + + end + (?=;) + patterns + + + include + #expression + + + + + match + \b(default)\b + captures + + 1 + + name + keyword.control.default.cs + + + + + name + entity.name.label.cs + match + [_[:alpha:]][_[:alnum:]]* + + + + return-statement + + begin + (?<!\.)\b(return)\b + beginCaptures + + 1 + + name + keyword.control.flow.return.cs + + + end + (?=;) + patterns + + + include + #expression + + + + break-or-continue-statement + + match + (?<!\.)\b(?:(break)|(continue))\b + captures + + 1 + + name + keyword.control.flow.break.cs + + 2 + + name + keyword.control.flow.continue.cs + + + + throw-statement + + begin + (?<!\.)\b(throw)\b + beginCaptures + + 1 + + name + 
keyword.control.flow.throw.cs + + + end + (?=;) + patterns + + + include + #expression + + + + yield-statement + + patterns + + + include + #yield-return-statement + + + include + #yield-break-statement + + + + yield-return-statement + + begin + (?<!\.)\b(yield)\b\s*\b(return)\b + beginCaptures + + 1 + + name + keyword.control.flow.yield.cs + + 2 + + name + keyword.control.flow.return.cs + + + end + (?=;) + patterns + + + include + #expression + + + + yield-break-statement + + match + (?<!\.)\b(yield)\b\s*\b(break)\b + captures + + 1 + + name + keyword.control.flow.yield.cs + + 2 + + name + keyword.control.flow.break.cs + + + + if-statement + + begin + (?<!\.)\b(if)\b\s*(?=\() + beginCaptures + + 1 + + name + keyword.control.conditional.if.cs + + + end + (?<=\})|(?=;) + patterns + + + begin + \( + beginCaptures + + 0 + + name + punctuation.parenthesis.open.cs + + + end + \) + endCaptures + + 0 + + name + punctuation.parenthesis.close.cs + + + patterns + + + include + #expression + + + + + include + #statement + + + + else-part + + begin + (?<!\.)\b(else)\b + beginCaptures + + 1 + + name + keyword.control.conditional.else.cs + + + end + (?<=\})|(?=;) + patterns + + + include + #statement + + + + switch-statement + + begin + (?<!\.)\b(switch)\b\s*(?=\() + beginCaptures + + 1 + + name + keyword.control.switch.cs + + + end + (?<=\}) + patterns + + + begin + \( + beginCaptures + + 0 + + name + punctuation.parenthesis.open.cs + + + end + \) + endCaptures + + 0 + + name + punctuation.parenthesis.close.cs + + + patterns + + + include + #expression + + + + + begin + \{ + beginCaptures + + 0 + + name + punctuation.curlybrace.open.cs + + + end + \} + endCaptures + + 0 + + name + punctuation.curlybrace.close.cs + + + patterns + + + include + #switch-label + + + include + #statement + + + + + + switch-label + + patterns + + + begin + (?<!\.)\b(case)\b\s+ + beginCaptures + + 1 + + name + keyword.control.case.cs + + + end + : + endCaptures + + 0 + + name + 
punctuation.separator.colon.cs + + + patterns + + + include + #expression + + + + + match + (?<!\.)\b(default)\b\s*(:) + captures + + 1 + + name + keyword.control.default.cs + + 2 + + name + punctuation.separator.colon.cs + + + + + + do-statement + + begin + (?<!\.)\b(do)\b + beginCaptures + + 1 + + name + keyword.control.loop.do.cs + + + end + (?=;|}) + patterns + + + include + #statement + + + + while-statement + + begin + (?<!\.)\b(while)\b\s*(?=\() + beginCaptures + + 1 + + name + keyword.control.loop.while.cs + + + end + (?<=\})|(?=;) + patterns + + + begin + \( + beginCaptures + + 0 + + name + punctuation.parenthesis.open.cs + + + end + \) + endCaptures + + 0 + + name + punctuation.parenthesis.close.cs + + + patterns + + + include + #expression + + + + + include + #statement + + + + for-statement + + begin + (?<!\.)\b(for)\b\s*(?=\() + beginCaptures + + 1 + + name + keyword.control.loop.for.cs + + + end + (?<=\})|(?=;) + patterns + + + begin + \( + beginCaptures + + 0 + + name + punctuation.parenthesis.open.cs + + + end + \) + endCaptures + + 0 + + name + punctuation.parenthesis.close.cs + + + patterns + + + include + #local-variable-declaration + + + include + #expression + + + include + #punctuation-comma + + + include + #punctuation-semicolon + + + + + include + #statement + + + + foreach-statement + + begin + (?<!\.)\b(foreach)\b\s*(?=\() + beginCaptures + + 1 + + name + keyword.control.loop.foreach.cs + + + end + (?<=\})|(?=;) + patterns + + + begin + \( + beginCaptures + + 0 + + name + punctuation.parenthesis.open.cs + + + end + \) + endCaptures + + 0 + + name + punctuation.parenthesis.close.cs + + + patterns + + + match + (?x) +(?: + (\bvar\b)| + (?<type-name> + (?: + (?:(?<identifier>[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification + (?<name-and-type-args> # identifier + type arguments (if any) + \g<identifier>\s* + (?<type-args>\s*<(?:[^<>]|\g<type-args>)+>\s*)? 
+ ) + (?:\s*\.\s*\g<name-and-type-args>)* # Are there any more names being dotted into? + (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? + (?:\s*\[(?:\s*,\s*)*\]\s*)* # array suffix? + )| + (?<tuple>\s*\((?:[^\(\)]|\g<tuple>)+\)) + ) +)\s+ +(\g<identifier>)\s+ +\b(in)\b + captures + + 1 + + name + keyword.other.var.cs + + 2 + + patterns + + + include + #type + + + + 7 + + name + entity.name.variable.local.cs + + 8 + + name + keyword.control.loop.in.cs + + + + + include + #expression + + + + + include + #statement + + + + try-statement + + patterns + + + include + #try-block + + + include + #catch-clause + + + include + #finally-clause + + + + try-block + + begin + (?<!\.)\b(try)\b + beginCaptures + + 1 + + name + keyword.control.try.cs + + + end + (?<=\}) + patterns + + + include + #block + + + + finally-clause + + begin + (?<!\.)\b(finally)\b + beginCaptures + + 1 + + name + keyword.control.try.finally.cs + + + end + (?<=\}) + patterns + + + include + #block + + + + catch-clause + + begin + (?<!\.)\b(catch)\b + beginCaptures + + 1 + + name + keyword.control.try.catch.cs + + + end + (?<=\}) + patterns + + + begin + \( + beginCaptures + + 0 + + name + punctuation.parenthesis.open.cs + + + end + \) + endCaptures + + 0 + + name + punctuation.parenthesis.close.cs + + + patterns + + + match + (?x) +(?<type-name> + (?: + (?:(?<identifier>[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification + (?<name-and-type-args> # identifier + type arguments (if any) + \g<identifier>\s* + (?<type-args>\s*<(?:[^<>]|\g<type-args>)+>\s*)? + ) + (?:\s*\.\s*\g<name-and-type-args>)* # Are there any more names being dotted into? + (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? + (?:\s*\[(?:\s*,\s*)*\]\s*)* # array suffix? + )| + (?<tuple>\s*\((?:[^\(\)]|\g<tuple>)+\)) +)\s* +(?:\b(\g<identifier>)\b)? 
+ captures + + 1 + + patterns + + + include + #type + + + + 6 + + name + entity.name.variable.local.cs + + + + + + + include + #when-clause + + + include + #block + + + + when-clause + + begin + (?<!\.)\b(when)\b\s*(\() + beginCaptures + + 1 + + name + keyword.control.try.when.cs + + 2 + + name + punctuation.parenthesis.open.cs + + + end + \) + endCaptures + + 0 + + name + punctuation.parenthesis.close.cs + + + patterns + + + include + #expression + + + + checked-unchecked-statement + + begin + (?<!\.)\b(?:(checked)|(unchecked))\b\s*(?!\() + beginCaptures + + 1 + + name + keyword.other.checked.cs + + 2 + + name + keyword.other.unchecked.cs + + + end + (?<=\}) + patterns + + + include + #block + + + + lock-statement + + begin + (?<!\.)\b(lock)\b\s*(?=\() + beginCaptures + + 1 + + name + keyword.other.lock.cs + + + end + (?<=\})|(?=;) + patterns + + + begin + \( + beginCaptures + + 0 + + name + punctuation.parenthesis.open.cs + + + end + \) + endCaptures + + 0 + + name + punctuation.parenthesis.close.cs + + + patterns + + + include + #expression + + + + + include + #statement + + + + using-statement + + begin + (?<!\.)\b(using)\b\s*(?=\() + beginCaptures + + 1 + + name + keyword.other.using.cs + + + end + (?=\;|}) + patterns + + + begin + \( + beginCaptures + + 0 + + name + punctuation.parenthesis.open.cs + + + end + \) + endCaptures + + 0 + + name + punctuation.parenthesis.close.cs + + + patterns + + + include + #local-variable-declaration + + + include + #expression + + + + + include + #statement + + + + labeled-statement + + match + ([_[:alpha:]][_[:alnum:]]*)\s*(:) + captures + + 1 + + name + entity.name.label.cs + + 2 + + name + punctuation.separator.colon.cs + + + + local-declaration + + patterns + + + include + #local-constant-declaration + + + include + #local-variable-declaration + + + + local-variable-declaration + + begin + (?x) +(?: + (\bvar\b)| + (?<type-name> + (?: + (?:(?<identifier>[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? 
# alias-qualification + (?<name-and-type-args> # identifier + type arguments (if any) + \g<identifier>\s* + (?<type-args>\s*<(?:[^<>]|\g<type-args>)+>\s*)? + ) + (?:\s*\.\s*\g<name-and-type-args>)* # Are there any more names being dotted into? + (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? + (?:\s*\[(?:\s*,\s*)*\]\s*)* # array suffix? + )| + (?<tuple>\s*\((?:[^\(\)]|\g<tuple>)+\)) + ) +)\s+ +(\g<identifier>)\s* +(?=,|;|=|\)) + beginCaptures + + 1 + + name + keyword.other.var.cs + + 2 + + patterns + + + include + #type + + + + 7 + + name + entity.name.variable.local.cs + + + end + (?=;|\)) + patterns + + + name + entity.name.variable.local.cs + match + [_[:alpha:]][_[:alnum:]]* + + + include + #punctuation-comma + + + include + #comment + + + include + #variable-initializer + + + + local-constant-declaration + + begin + (?x) +(?<const-keyword>\b(?:const)\b)\s* +(?<type-name> + (?: + (?:(?<identifier>[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification + (?<name-and-type-args> # identifier + type arguments (if any) + \g<identifier>\s* + (?<type-args>\s*<(?:[^<>]|\g<type-args>)+>\s*)? + ) + (?:\s*\.\s*\g<name-and-type-args>)* # Are there any more names being dotted into? + (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? + (?:\s*\[(?:\s*,\s*)*\]\s*)* # array suffix? 
+ )| + (?<tuple>\s*\((?:[^\(\)]|\g<tuple>)+\)) +)\s+ +(\g<identifier>)\s* +(?=,|;|=) + beginCaptures + + 1 + + name + storage.modifier.cs + + 2 + + patterns + + + include + #type + + + + 7 + + name + entity.name.variable.local.cs + + + end + (?=;) + patterns + + + name + entity.name.variable.local.cs + match + [_[:alpha:]][_[:alnum:]]* + + + include + #punctuation-comma + + + include + #comment + + + include + #variable-initializer + + + + checked-unchecked-expression + + begin + (?<!\.)\b(?:(checked)|(unchecked))\b\s*(\() + beginCaptures + + 1 + + name + keyword.other.checked.cs + + 2 + + name + keyword.other.unchecked.cs + + 3 + + name + punctuation.parenthesis.open.cs + + + end + \) + endCaptures + + 0 + + name + punctuation.parenthesis.close.cs + + + patterns + + + include + #expression + + + + typeof-or-default-expression + + begin + (?<!\.)\b(?:(typeof)|(default))\b\s*(\() + beginCaptures + + 1 + + name + keyword.other.typeof.cs + + 2 + + name + keyword.other.default.cs + + 3 + + name + punctuation.parenthesis.open.cs + + + end + \) + endCaptures + + 0 + + name + punctuation.parenthesis.close.cs + + + patterns + + + include + #type + + + + nameof-expression + + begin + (?<!\.)\b(nameof)\b\s*(\() + beginCaptures + + 1 + + name + keyword.other.nameof.cs + + 2 + + name + punctuation.parenthesis.open.cs + + + end + \) + endCaptures + + 0 + + name + punctuation.parenthesis.close.cs + + + patterns + + + include + #expression + + + + interpolated-string + + name + string.quoted.double.cs + begin + \$" + beginCaptures + + 0 + + name + punctuation.definition.string.begin.cs + + + end + (")|((?:[^\\\n])$) + endCaptures + + 1 + + name + punctuation.definition.string.end.cs + + 2 + + name + invalid.illegal.newline.cs + + + patterns + + + include + #string-character-escape + + + include + #interpolation + + + + verbatim-interpolated-string + + name + string.quoted.double.cs + begin + \$@" + beginCaptures + + 0 + + name + punctuation.definition.string.begin.cs + + + end + 
"(?=[^"]) + endCaptures + + 0 + + name + punctuation.definition.string.end.cs + + + patterns + + + include + #verbatim-string-character-escape + + + include + #interpolation + + + + interpolation + + name + meta.interpolation.cs + begin + (?<=[^\{])((?:\{\{)*)(\{)(?=[^\{]) + beginCaptures + + 1 + + name + string.quoted.double.cs + + 2 + + name + punctuation.definition.interpolation.begin.cs + + + end + \} + endCaptures + + 0 + + name + punctuation.definition.interpolation.end.cs + + + patterns + + + include + #expression + + + + literal + + patterns + + + include + #boolean-literal + + + include + #null-literal + + + include + #numeric-literal + + + include + #char-literal + + + include + #string-literal + + + include + #verbatim-string-literal + + + + boolean-literal + + patterns + + + name + constant.language.boolean.true.cs + match + (?<!\.)\btrue\b + + + name + constant.language.boolean.false.cs + match + (?<!\.)\bfalse\b + + + + null-literal + + name + constant.language.null.cs + match + (?<!\.)\bnull\b + + numeric-literal + + patterns + + + name + constant.numeric.hex.cs + match + \b0(x|X)[0-9a-fA-F_]+(U|u|L|l|UL|Ul|uL|ul|LU|Lu|lU|lu)?\b + + + name + constant.numeric.binary.cs + match + \b0(b|B)[01_]+(U|u|L|l|UL|Ul|uL|ul|LU|Lu|lU|lu)?\b + + + name + constant.numeric.decimal.cs + match + \b([0-9_]+)?\.[0-9_]+((e|E)[0-9]+)?(F|f|D|d|M|m)?\b + + + name + constant.numeric.decimal.cs + match + \b[0-9_]+(e|E)[0-9_]+(F|f|D|d|M|m)?\b + + + name + constant.numeric.decimal.cs + match + \b[0-9_]+(F|f|D|d|M|m)\b + + + name + constant.numeric.decimal.cs + match + \b[0-9_]+(U|u|L|l|UL|Ul|uL|ul|LU|Lu|lU|lu)?\b + + + + char-literal + + name + string.quoted.single.cs + begin + ' + beginCaptures + + 0 + + name + punctuation.definition.char.begin.cs + + + end + (\')|((?:[^\\\n])$) + endCaptures + + 1 + + name + punctuation.definition.char.end.cs + + 2 + + name + invalid.illegal.newline.cs + + + patterns + + + include + #string-character-escape + + + + string-literal + + name + 
string.quoted.double.cs + begin + (?<!@)" + beginCaptures + + 0 + + name + punctuation.definition.string.begin.cs + + + end + (")|((?:[^\\\n])$) + endCaptures + + 1 + + name + punctuation.definition.string.end.cs + + 2 + + name + invalid.illegal.newline.cs + + + patterns + + + include + #string-character-escape + + + + string-character-escape + + name + constant.character.escape.cs + match + \\. + + verbatim-string-literal + + name + string.quoted.double.cs + begin + @" + beginCaptures + + 0 + + name + punctuation.definition.string.begin.cs + + + end + "(?=[^"]) + endCaptures + + 0 + + name + punctuation.definition.string.end.cs + + + patterns + + + include + #verbatim-string-character-escape + + + + verbatim-string-character-escape + + name + constant.character.escape.cs + match + "" + + expression-operators + + patterns + + + name + keyword.operator.assignment.compound.cs + match + \*=|/=|%=|\+=|-= + + + name + keyword.operator.assignment.compound.bitwise.cs + match + \&=|\^=|<<=|>>=|\|= + + + name + keyword.operator.bitwise.shift.cs + match + <<|>> + + + name + keyword.operator.comparison.cs + match + ==|!= + + + name + keyword.operator.relational.cs + match + <=|>=|<|> + + + name + keyword.operator.logical.cs + match + \!|&&|\|\| + + + name + keyword.operator.bitwise.cs + match + \&|~|\^|\| + + + name + keyword.operator.assignment.cs + match + \= + + + name + keyword.operator.decrement.cs + match + -- + + + name + keyword.operator.increment.cs + match + \+\+ + + + name + keyword.operator.arithmetic.cs + match + %|\*|/|-|\+ + + + name + keyword.operator.null-coalescing.cs + match + \?\? 
+ + + + conditional-operator + + begin + (?<!\?)\?(?!\?|\.|\[) + beginCaptures + + 0 + + name + keyword.operator.conditional.question-mark.cs + + + end + : + endCaptures + + 0 + + name + keyword.operator.conditional.colon.cs + + + patterns + + + include + #expression + + + + await-expression + + name + keyword.other.await.cs + match + (?!\.)\b(await)\b + + parenthesized-expression + + begin + \( + beginCaptures + + 0 + + name + punctuation.parenthesis.open.cs + + + end + \) + endCaptures + + 0 + + name + punctuation.parenthesis.close.cs + + + patterns + + + include + #expression + + + + initializer-expression + + begin + \{ + beginCaptures + + 0 + + name + punctuation.curlybrace.open.cs + + + end + \} + endCaptures + + 0 + + name + punctuation.curlybrace.close.cs + + + patterns + + + include + #expression + + + include + #punctuation-comma + + + + identifier + + name + variable.other.readwrite.cs + match + [_[:alpha:]][_[:alnum:]]* + + cast-expression + + match + (?x) +(\()\s* +(?<type-name> + (?: + (?:(?<identifier>[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification + (?<name-and-type-args> # identifier + type arguments (if any) + \g<identifier>\s* + (?<type-args>\s*<(?:[^<>]|\g<type-args>)+>\s*)? + ) + (?:\s*\.\s*\g<name-and-type-args>)* # Are there any more names being dotted into? + (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? + (?:\s*\[(?:\s*,\s*)*\]\s*)* # array suffix? + )| + (?<tuple>\s*\((?:[^\(\)]|\g<tuple>)+\)) +)\s* +(\))(?=\s*[_[:alnum:]\(]) + captures + + 1 + + name + punctuation.parenthesis.open.cs + + 2 + + patterns + + + include + #type + + + + 7 + + name + punctuation.parenthesis.close.cs + + + + as-expression + + match + (?x) +(?<!\.)\b(as)\b\s* +(?<type-name> + (?: + (?:(?<identifier>[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification + (?<name-and-type-args> # identifier + type arguments (if any) + \g<identifier>\s* + (?<type-args>\s*<(?:[^<>]|\g<type-args>)+>\s*)? 
+ ) + (?:\s*\.\s*\g<name-and-type-args>)* # Are there any more names being dotted into? + (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? + (?:\s*\[(?:\s*,\s*)*\]\s*)* # array suffix? + )| + (?<tuple>\s*\((?:[^\(\)]|\g<tuple>)+\)) +)? + captures + + 1 + + name + keyword.other.as.cs + + 2 + + patterns + + + include + #type + + + + + + is-expression + + match + (?x) +(?<!\.)\b(is)\b\s* +(?<type-name> + (?: + (?:(?<identifier>[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification + (?<name-and-type-args> # identifier + type arguments (if any) + \g<identifier>\s* + (?<type-args>\s*<(?:[^<>]|\g<type-args>)+>\s*)? + ) + (?:\s*\.\s*\g<name-and-type-args>)* # Are there any more names being dotted into? + (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? + (?:\s*\[(?:\s*,\s*)*\]\s*)* # array suffix? + )| + (?<tuple>\s*\((?:[^\(\)]|\g<tuple>)+\)) +)? + captures + + 1 + + name + keyword.other.is.cs + + 2 + + patterns + + + include + #type + + + + + + this-or-base-expression + + match + \b(?:(base)|(this))\b + captures + + 1 + + name + keyword.other.base.cs + + 2 + + name + keyword.other.this.cs + + + + invocation-expression + + begin + (?x) +(?:(\?)\s*)? # preceding null-conditional operator? +(?:(\.)\s*)? # preceding dot? +([_[:alpha:]][_[:alnum:]]*)\s* # method name +(?<type-args>\s*<([^<>]|\g<type-args>)+>\s*)?\s* # type arguments +(?=\() # open paren of argument list + beginCaptures + + 1 + + name + keyword.operator.null-conditional.cs + + 2 + + name + punctuation.accessor.cs + + 3 + + name + entity.name.function.cs + + 4 + + patterns + + + include + #type-arguments + + + + + end + (?<=\)) + patterns + + + include + #argument-list + + + + element-access-expression + + begin + (?x) +(?:(\?)\s*)? # preceding null-conditional operator? +(?:(\.)\s*)? # preceding dot? +([_[:alpha:]][_[:alnum:]]*)\s* # property name +(?:(\?)\s*)? # null-conditional operator? 
+(?=\[) # open bracket of argument list + beginCaptures + + 1 + + name + keyword.operator.null-conditional.cs + + 2 + + name + punctuation.accessor.cs + + 3 + + name + variable.other.object.property.cs + + 4 + + name + keyword.operator.null-conditional.cs + + + end + (?<=\]) + patterns + + + include + #bracketed-argument-list + + + + member-access-expression + + patterns + + + match + (?x) +(?:(\?)\s*)? # preceding null-conditional operator? +(\.)\s* # preceding dot +([_[:alpha:]][_[:alnum:]]*)\s* # property name +(?![_[:alnum:]]|\(|(\?)?\[|<) # next character is not alpha-numeric, nor a (, [, or <. Also, test for ?[ + captures + + 1 + + name + keyword.operator.null-conditional.cs + + 2 + + name + punctuation.accessor.cs + + 3 + + name + variable.other.object.property.cs + + + + + match + (?x) +(\.)?\s* +([_[:alpha:]][_[:alnum:]]*) +(?<type-params>\s*<([^<>]|\g<type-params>)+>\s*) +(?= + (\s*\?)? + \s*\.\s*[_[:alpha:]][_[:alnum:]]* +) + captures + + 1 + + name + punctuation.accessor.cs + + 2 + + name + variable.other.object.cs + + 3 + + patterns + + + include + #type-arguments + + + + + + + match + (?x) +([_[:alpha:]][_[:alnum:]]*) +(?= + (\s*\?)? + \s*\.\s*[_[:alpha:]][_[:alnum:]]* +) + captures + + 1 + + name + variable.other.object.cs + + + + + + object-creation-expression + + patterns + + + include + #object-creation-expression-with-parameters + + + include + #object-creation-expression-with-no-parameters + + + + object-creation-expression-with-parameters + + begin + (?x) +(new)\s+ +(?<type-name> + (?: + (?:(?<identifier>[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification + (?<name-and-type-args> # identifier + type arguments (if any) + \g<identifier>\s* + (?<type-args>\s*<(?:[^<>]|\g<type-args>)+>\s*)? + ) + (?:\s*\.\s*\g<name-and-type-args>)* # Are there any more names being dotted into? + (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? + (?:\s*\[(?:\s*,\s*)*\]\s*)* # array suffix? 
+ )| + (?<tuple>\s*\((?:[^\(\)]|\g<tuple>)+\)) +)\s* +(?=\() + beginCaptures + + 1 + + name + keyword.other.new.cs + + 2 + + patterns + + + include + #type + + + + + end + (?<=\)) + patterns + + + include + #argument-list + + + + object-creation-expression-with-no-parameters + + match + (?x) +(new)\s+ +(?<type-name> + (?: + (?:(?<identifier>[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification + (?<name-and-type-args> # identifier + type arguments (if any) + \g<identifier>\s* + (?<type-args>\s*<(?:[^<>]|\g<type-args>)+>\s*)? + ) + (?:\s*\.\s*\g<name-and-type-args>)* # Are there any more names being dotted into? + (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? + (?:\s*\[(?:\s*,\s*)*\]\s*)* # array suffix? + )| + (?<tuple>\s*\((?:[^\(\)]|\g<tuple>)+\)) +)\s* +(?=\{|$) + captures + + 1 + + name + keyword.other.new.cs + + 2 + + patterns + + + include + #type + + + + + + array-creation-expression + + begin + (?x) +\b(new)\b\s* +(?<type-name> + (?: + (?:(?<identifier>[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification + (?<name-and-type-args> # identifier + type arguments (if any) + \g<identifier>\s* + (?<type-args>\s*<(?:[^<>]|\g<type-args>)+>\s*)? + ) + (?:\s*\.\s*\g<name-and-type-args>)* # Are there any more names being dotted into? + (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? + (?:\s*\[(?:\s*,\s*)*\]\s*)* # array suffix? 
+ )| + (?<tuple>\s*\((?:[^\(\)]|\g<tuple>)+\)) +)?\s* +(?=\[) + beginCaptures + + 1 + + name + keyword.other.new.cs + + 2 + + patterns + + + include + #type + + + + + end + (?<=\]) + patterns + + + include + #bracketed-argument-list + + + + anonymous-object-creation-expression + + begin + \b(new)\b\s*(?=\{|$) + beginCaptures + + 1 + + name + keyword.other.new.cs + + + end + (?=;|\)) + patterns + + + include + #initializer-expression + + + + bracketed-parameter-list + + begin + (?=(\[)) + beginCaptures + + 1 + + name + punctuation.squarebracket.open.cs + + + end + (?=(\])) + endCaptures + + 1 + + name + punctuation.squarebracket.close.cs + + + patterns + + + begin + (?<=\[) + end + (?=\]) + patterns + + + include + #comment + + + include + #attribute-section + + + name + storage.modifier.cs + match + \b(ref|params|out)\b + + + match + \s+([_[:alpha:]][_[:alnum:]]*)\s*(?=[,\]]) + captures + + 1 + + name + entity.name.variable.parameter.cs + + + + + include + #variable-initializer + + + include + #type + + + include + #punctuation-comma + + + + + + parenthesized-parameter-list + + begin + (\() + beginCaptures + + 0 + + name + punctuation.parenthesis.open.cs + + + end + (\)) + endCaptures + + 0 + + name + punctuation.parenthesis.close.cs + + + patterns + + + include + #comment + + + include + #attribute-section + + + name + storage.modifier.cs + match + \b(ref|params|out|this)\b + + + match + \s+([_[:alpha:]][_[:alnum:]]*)\s*(?=[,)]) + captures + + 1 + + name + entity.name.variable.parameter.cs + + + + + include + #variable-initializer + + + include + #type + + + include + #punctuation-comma + + + + argument-list + + begin + \( + beginCaptures + + 0 + + name + punctuation.parenthesis.open.cs + + + end + \) + endCaptures + + 0 + + name + punctuation.parenthesis.close.cs + + + patterns + + + include + #named-argument + + + include + #argument + + + include + #punctuation-comma + + + + bracketed-argument-list + + begin + \[ + beginCaptures + + 0 + + name + 
punctuation.squarebracket.open.cs + + + end + \] + endCaptures + + 0 + + name + punctuation.squarebracket.close.cs + + + patterns + + + include + #named-argument + + + include + #argument + + + include + #punctuation-comma + + + + named-argument + + begin + ([_[:alpha:]][_[:alnum:]]*)\s*(:) + beginCaptures + + 1 + + name + entity.name.variable.parameter.cs + + 2 + + name + punctuation.separator.colon.cs + + + end + (?=(,|\)|\])) + patterns + + + include + #expression + + + + argument + + patterns + + + name + storage.modifier.cs + match + \b(ref|out)\b + + + include + #expression + + + + query-expression + + begin + (?x) +\b(from)\b\s* +(?<type-name> + (?: + (?:(?<identifier>[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification + (?<name-and-type-args> # identifier + type arguments (if any) + \g<identifier>\s* + (?<type-args>\s*<(?:[^<>]|\g<type-args>)+>\s*)? + ) + (?:\s*\.\s*\g<name-and-type-args>)* # Are there any more names being dotted into? + (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? + (?:\s*\[(?:\s*,\s*)*\]\s*)* # array suffix? + )| + (?<tuple>\s*\((?:[^\(\)]|\g<tuple>)+\)) +)? 
+\b(\g<identifier>)\b\s* +\b(in)\b\s* + beginCaptures + + 1 + + name + keyword.query.from.cs + + 2 + + patterns + + + include + #type + + + + 7 + + name + entity.name.variable.range-variable.cs + + 8 + + name + keyword.query.in.cs + + + end + (?=;|\)) + patterns + + + include + #query-body + + + include + #expression + + + + query-body + + patterns + + + include + #let-clause + + + include + #where-clause + + + include + #join-clause + + + include + #orderby-clause + + + include + #select-clause + + + include + #group-clause + + + + let-clause + + begin + (?x) +\b(let)\b\s* +\b([_[:alpha:]][_[:alnum:]]*)\b\s* +(=)\s* + beginCaptures + + 1 + + name + keyword.query.let.cs + + 2 + + name + entity.name.variable.range-variable.cs + + 3 + + name + keyword.operator.assignment.cs + + + end + (?=;|\)) + patterns + + + include + #query-body + + + include + #expression + + + + where-clause + + begin + (?x) +\b(where)\b\s* + beginCaptures + + 1 + + name + keyword.query.where.cs + + + end + (?=;|\)) + patterns + + + include + #query-body + + + include + #expression + + + + join-clause + + begin + (?x) +\b(join)\b\s* +(?<type-name> + (?: + (?:(?<identifier>[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification + (?<name-and-type-args> # identifier + type arguments (if any) + \g<identifier>\s* + (?<type-args>\s*<(?:[^<>]|\g<type-args>)+>\s*)? + ) + (?:\s*\.\s*\g<name-and-type-args>)* # Are there any more names being dotted into? + (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? + (?:\s*\[(?:\s*,\s*)*\]\s*)* # array suffix? + )| + (?<tuple>\s*\((?:[^\(\)]|\g<tuple>)+\)) +)? 
+\b(\g<identifier>)\b\s* +\b(in)\b\s* + beginCaptures + + 1 + + name + keyword.query.join.cs + + 2 + + patterns + + + include + #type + + + + 7 + + name + entity.name.variable.range-variable.cs + + 8 + + name + keyword.query.in.cs + + + end + (?=;|\)) + patterns + + + include + #join-on + + + include + #join-equals + + + include + #join-into + + + include + #query-body + + + include + #expression + + + + join-on + + match + \b(on)\b\s* + captures + + 1 + + name + keyword.query.on.cs + + + + join-equals + + match + \b(equals)\b\s* + captures + + 1 + + name + keyword.query.equals.cs + + + + join-into + + match + (?x) +\b(into)\b\s* +\b([_[:alpha:]][_[:alnum:]]*)\b\s* + captures + + 1 + + name + keyword.query.into.cs + + 2 + + name + entity.name.variable.range-variable.cs + + + + orderby-clause + + begin + \b(orderby)\b\s* + beginCaptures + + 1 + + name + keyword.query.orderby.cs + + + end + (?=;|\)) + patterns + + + include + #ordering-direction + + + include + #query-body + + + include + #expression + + + include + #punctuation-comma + + + + ordering-direction + + match + \b(?:(ascending)|(descending))\b + captures + + 1 + + name + keyword.query.ascending.cs + + 2 + + name + keyword.query.descending.cs + + + + select-clause + + begin + \b(select)\b\s* + beginCaptures + + 1 + + name + keyword.query.select.cs + + + end + (?=;|\)) + patterns + + + include + #query-body + + + include + #expression + + + + group-clause + + begin + \b(group)\b\s* + beginCaptures + + 1 + + name + keyword.query.group.cs + + + end + (?=;|\)) + patterns + + + include + #group-by + + + include + #group-into + + + include + #query-body + + + include + #expression + + + + group-by + + match + \b(by)\b\s* + captures + + 1 + + name + keyword.query.by.cs + + + + group-into + + match + (?x) +\b(into)\b\s* +\b([_[:alpha:]][_[:alnum:]]*)\b\s* + captures + + 1 + + name + keyword.query.into.cs + + 2 + + name + entity.name.variable.range-variable.cs + + + + anonymous-method-expression + + patterns + + + 
begin + (?x) +(?:\b(async)\b\s*)? +\b([_[:alpha:]][_[:alnum:]]*)\b\s* +(=>) + beginCaptures + + 1 + + name + storage.modifier.cs + + 2 + + name + entity.name.variable.parameter.cs + + 3 + + name + keyword.operator.arrow.cs + + + end + (?=\)|;) + patterns + + + include + #block + + + include + #expression + + + + + begin + (?x) +(?:\b(async)\b\s*)? +(\(.*\))\s* +(=>) + beginCaptures + + 1 + + name + storage.modifier.cs + + 2 + + patterns + + + include + #lambda-parameter-list + + + + 3 + + name + keyword.operator.arrow.cs + + + end + (?=\)|;) + patterns + + + include + #block + + + include + #expression + + + + + begin + (?x) +(?:\b(async)\b\s*)? +(?:\b(delegate)\b\s*) + beginCaptures + + 1 + + name + storage.modifier.cs + + 2 + + name + keyword.other.delegate.cs + + + end + (?=\)|;) + patterns + + + include + #parenthesized-parameter-list + + + include + #block + + + include + #expression + + + + + + lambda-parameter-list + + begin + (\() + beginCaptures + + 0 + + name + punctuation.parenthesis.open.cs + + + end + (\)) + endCaptures + + 0 + + name + punctuation.parenthesis.close.cs + + + patterns + + + include + #comment + + + include + #attribute-section + + + include + #lambda-parameter + + + include + #punctuation-comma + + + + lambda-parameter + + match + (?x) +(ref|out)?\s* +(?<type-name> + (?: + (?:(?<identifier>[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification + (?<name-and-type-args> # identifier + type arguments (if any) + \g<identifier>\s* + (?<type-args>\s*<(?:[^<>]|\g<type-args>)+>\s*)? + ) + (?:\s*\.\s*\g<name-and-type-args>)* # Are there any more names being dotted into? + (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? + (?:\s*\[(?:\s*,\s*)*\]\s*)* # array suffix? + )| + (?<tuple>\s*\((?:[^\(\)]|\g<tuple>)+\)) +)? 
+\b(\g<identifier>)\b\s* +(?=[,)]) + captures + + 1 + + name + storage.modifier.cs + + 2 + + patterns + + + include + #type + + + + 7 + + name + entity.name.variable.parameter.cs + + + + type + + name + meta.type.cs + patterns + + + include + #comment + + + include + #tuple-type + + + include + #type-builtin + + + include + #type-name + + + include + #type-arguments + + + include + #type-array-suffix + + + include + #type-nullable-suffix + + + + tuple-type + + begin + \( + beginCaptures + + 0 + + name + punctuation.parenthesis.open.cs + + + end + \) + endCaptures + + 0 + + name + punctuation.parenthesis.close.cs + + + patterns + + + include + #tuple-element + + + include + #punctuation-comma + + + + tuple-element + + match + (?x) +(?<type-name> + (?: + (?:(?<identifier>[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification + (?<name-and-type-args> # identifier + type arguments (if any) + \g<identifier>\s* + (?<type-args>\s*<(?:[^<>]|\g<type-args>)+>\s*)? + ) + (?:\s*\.\s*\g<name-and-type-args>)* # Are there any more names being dotted into? + (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? + (?:\s*\[(?:\s*,\s*)*\]\s*)* # array suffix? + )| + (?<tuple>\s*\((?:[^\(\)]|\g<tuple>)+\)) +) +(?:\b(?<tuple-name>\g<identifier>)\b)? + captures + + 1 + + patterns + + + include + #type + + + + 6 + + name + entity.name.variable.tuple-element.cs + + + + type-builtin + + match + \b(bool|byte|char|decimal|double|float|int|long|object|sbyte|short|string|uint|ulong|ushort|void)\b + captures + + 1 + + name + keyword.type.cs + + + + type-name + + patterns + + + match + ([_[:alpha:]][_[:alnum:]]*)\s*(\:\:) + captures + + 1 + + name + entity.name.type.alias.cs + + 2 + + name + punctuation.separator.coloncolon.cs + + + + + match + ([_[:alpha:]][_[:alnum:]]*)\s*(\.) 
+ captures + + 1 + + name + storage.type.cs + + 2 + + name + punctuation.accessor.cs + + + + + match + (\.)\s*([_[:alpha:]][_[:alnum:]]*) + captures + + 1 + + name + punctuation.accessor.cs + + 2 + + name + storage.type.cs + + + + + name + storage.type.cs + match + [_[:alpha:]][_[:alnum:]]* + + + + type-arguments + + name + meta.type.parameters.cs + begin + < + beginCaptures + + 0 + + name + punctuation.definition.typeparameters.begin.cs + + + end + > + endCaptures + + 0 + + name + punctuation.definition.typeparameters.end.cs + + + patterns + + + include + #comment + + + include + #type + + + include + #punctuation-comma + + + + type-array-suffix + + begin + \[ + beginCaptures + + 0 + + name + punctuation.squarebracket.open.cs + + + end + \] + endCaptures + + 0 + + name + punctuation.squarebracket.close.cs + + + patterns + + + include + #punctuation-comma + + + + type-nullable-suffix + + match + \? + captures + + 0 + + name + punctuation.separator.question-mark.cs + + + + operator-assignment + + name + keyword.operator.assignment.cs + match + (?<!=|!)(=)(?!=) + + punctuation-comma + + name + punctuation.separator.comma.cs + match + , + + punctuation-semicolon + + name + punctuation.terminator.statement.cs + match + ; + + punctuation-accessor + + name + punctuation.accessor.cs + match + \. 
+ + preprocessor + + name + meta.preprocessor.cs + begin + ^\s*(\#)\s* + beginCaptures + + 1 + + name + punctuation.separator.hash.cs + + + end + (?<=$) + patterns + + + include + #comment + + + include + #preprocessor-define-or-undef + + + include + #preprocessor-if-or-elif + + + include + #preprocessor-else-or-endif + + + include + #preprocessor-warning-or-error + + + include + #preprocessor-region + + + include + #preprocessor-endregion + + + include + #preprocessor-line + + + include + #preprocessor-pragma-warning + + + include + #preprocessor-pragma-checksum + + + + preprocessor-define-or-undef + + match + \b(?:(define)|(undef))\b\s*\b([_[:alpha:]][_[:alnum:]]*)\b + captures + + 1 + + name + keyword.preprocessor.define.cs + + 2 + + name + keyword.preprocessor.undef.cs + + 3 + + name + entity.name.variable.preprocessor.symbol.cs + + + + preprocessor-if-or-elif + + begin + \b(?:(if)|(elif))\b + beginCaptures + + 1 + + name + keyword.preprocessor.if.cs + + 2 + + name + keyword.preprocessor.elif.cs + + + end + (?=$) + patterns + + + include + #comment + + + include + #preprocessor-expression + + + + preprocessor-else-or-endif + + match + \b(?:(else)|(endif))\b + captures + + 1 + + name + keyword.preprocessor.else.cs + + 2 + + name + keyword.preprocessor.endif.cs + + + + preprocessor-warning-or-error + + match + \b(?:(warning)|(error))\b\s*(.*)(?=$) + captures + + 1 + + name + keyword.preprocessor.warning.cs + + 2 + + name + keyword.preprocessor.error.cs + + 3 + + name + string.unquoted.preprocessor.message.cs + + + + preprocessor-region + + match + \b(region)\b\s*(.*)(?=$) + captures + + 1 + + name + keyword.preprocessor.region.cs + + 2 + + name + string.unquoted.preprocessor.message.cs + + + + preprocessor-endregion + + match + \b(endregion)\b + captures + + 1 + + name + keyword.preprocessor.endregion.cs + + + + preprocessor-line + + begin + \b(line)\b + beginCaptures + + 1 + + name + keyword.preprocessor.line.cs + + + end + (?=$) + patterns + + + match + 
\b(?:(default|hidden)) + captures + + 1 + + name + keyword.preprocessor.default.cs + + 2 + + name + keyword.preprocessor.hidden.cs + + + + + match + [0-9]+ + captures + + 0 + + name + constant.numeric.decimal.cs + + + + + match + \"[^"]*\" + captures + + 0 + + name + string.quoted.double.cs + + + + + + preprocessor-pragma-warning + + match + \b(pragma)\b\s*\b(warning)\b\s*\b(?:(disable)|(restore))\b(\s*[0-9]+(?:\s*,\s*[0-9]+)?)? + captures + + 1 + + name + keyword.preprocessor.pragma.cs + + 2 + + name + keyword.preprocessor.warning.cs + + 3 + + name + keyword.preprocessor.disable.cs + + 4 + + name + keyword.preprocessor.restore.cs + + 5 + + patterns + + + match + [0-9]+ + captures + + 0 + + name + constant.numeric.decimal.cs + + + + + include + #punctuation-comma + + + + + + preprocessor-pragma-checksum + + match + \b(pragma)\b\s*\b(checksum)\b\s*(\"[^"]*\")\s*(\"[^"]*\")\s*(\"[^"]*\") + captures + + 1 + + name + keyword.preprocessor.pragma.cs + + 2 + + name + keyword.preprocessor.checksum.cs + + 3 + + name + string.quoted.double.cs + + 4 + + name + string.quoted.double.cs + + 5 + + name + string.quoted.double.cs + + + + preprocessor-expression + + patterns + + + begin + \( + beginCaptures + + 0 + + name + punctuation.parenthesis.open.cs + + + end + \) + endCaptures + + 0 + + name + punctuation.parenthesis.close.cs + + + patterns + + + include + #preprocessor-expression + + + + + match + \b(?:(true)|(false)|([_[:alpha:]][_[:alnum:]]*))\b + captures + + 1 + + name + constant.language.boolean.true.cs + + 2 + + name + constant.language.boolean.false.cs + + 3 + + name + entity.name.variable.preprocessor.symbol.cs + + + + + match + (==|!=)|(\!|&&|\|\|) + captures + + 1 + + name + keyword.operator.comparison.cs + + 2 + + name + keyword.operator.logical.cs + + + + + + comment + + patterns + + + name + comment.block.cs + begin + /\* + beginCaptures + + 0 + + name + punctuation.definition.comment.cs + + + end + \*/ + endCaptures + + 0 + + name + 
punctuation.definition.comment.cs + + + + + begin + (^\s+)?(?=//) + beginCaptures + + 1 + + name + punctuation.whitespace.comment.leading.cs + + + end + (?=$) + patterns + + + name + comment.block.documentation.cs + begin + (?<!/)///(?!/) + beginCaptures + + 0 + + name + punctuation.definition.comment.cs + + + end + (?=$) + patterns + + + include + #xml-doc-comment + + + + + name + comment.line.double-slash.cs + begin + (?<!/)//(?!/) + beginCaptures + + 0 + + name + punctuation.definition.comment.cs + + + end + (?=$) + + + + + + xml-doc-comment + + patterns + + + include + #xml-comment + + + include + #xml-character-entity + + + include + #xml-cdata + + + include + #xml-tag + + + + xml-tag + + name + meta.tag.cs + begin + (?x) +(</?) +( + (?: + ([-_[:alnum:]]+) + (:) + )? + ([-_[:alnum:]]+) +) + beginCaptures + + 1 + + name + punctuation.definition.tag.cs + + 2 + + name + entity.name.tag.cs + + 3 + + name + entity.name.tag.namespace.cs + + 4 + + name + punctuation.separator.colon.cs + + 5 + + name + entity.name.tag.localname.cs + + + end + (/?>) + endCaptures + + 1 + + name + punctuation.definition.tag.cs + + + patterns + + + include + #xml-attribute + + + + xml-attribute + + patterns + + + match + (?x) +(?:^|\s+) +( + (?: + ([-_[:alnum:]]+) + (:) + )? 
+ ([-_[:alnum:]]+) +) +(=) + captures + + 1 + + name + entity.other.attribute-name.cs + + 2 + + name + entity.other.attribute-name.namespace.cs + + 3 + + name + punctuation.separator.colon.cs + + 4 + + name + entity.other.attribute-name.localname.cs + + 5 + + name + punctuation.separator.equals.cs + + + + + include + #xml-string + + + + xml-cdata + + name + string.unquoted.cdata.cs + begin + <!\[CDATA\[ + beginCaptures + + 0 + + name + punctuation.definition.string.begin.cs + + + end + \]\]> + endCaptures + + 0 + + name + punctuation.definition.string.end.cs + + + + xml-string + + patterns + + + name + string.quoted.single.cs + begin + \' + beginCaptures + + 0 + + name + punctuation.definition.string.begin.cs + + + end + \' + endCaptures + + 0 + + name + punctuation.definition.string.end.cs + + + patterns + + + include + #xml-character-entity + + + + + name + string.quoted.double.cs + begin + \" + beginCaptures + + 0 + + name + punctuation.definition.string.begin.cs + + + end + \" + endCaptures + + 0 + + name + punctuation.definition.string.end.cs + + + patterns + + + include + #xml-character-entity + + + + + + xml-character-entity + + patterns + + + name + constant.character.entity.cs + match + (?x) +(&) +( + (?:[[:alpha:]:_][[:alnum:]:_.-]*)| + (?:\#[[:digit:]]+)| + (?:\#x[[:xdigit:]]+) +) +(;) + captures + + 1 + + name + punctuation.definition.constant.cs + + 3 + + name + punctuation.definition.constant.cs + + + + + name + invalid.illegal.bad-ampersand.cs + match + & + + + + xml-comment + + name + comment.block.cs + begin + <!-- + beginCaptures + + 0 + + name + punctuation.definition.comment.cs + + + end + --> + endCaptures + + 0 + + name + punctuation.definition.comment.cs + + + + + + \ No newline at end of file From eed1bbe0039e5373438d0d674f95f3fa0f8ea62e Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Tue, 10 Jan 2017 17:55:58 -0800 Subject: [PATCH 144/192] Revert unintended changes in package.json --- package.json | 13 +++---------- 1 file changed, 3 
insertions(+), 10 deletions(-) diff --git a/package.json b/package.json index 06f5514647..9581f6b680 100644 --- a/package.json +++ b/package.json @@ -478,11 +478,13 @@ "razor" ] }, + "runtime": "node", "runtimeArgs": [], "variables": { "pickProcess": "csharp.listProcess", "pickRemoteProcess": "csharp.listRemoteProcess" }, + "program": "./out/src/coreclr-debug/proxy.js", "aiKey": "AIF-d9b70cd4-b9f9-4d70-929b-a071c400b217", "configurationAttributes": { "launch": { @@ -1142,16 +1144,7 @@ "request": "attach", "processId": "${command.pickProcess}" } - ], - "windows": { - "program": "./.debugger/OpenDebugAD7.exe" - }, - "osx": { - "program": "./.debugger/OpenDebugAD7" - }, - "linux": { - "program": "./.debugger/OpenDebugAD7" - } + ] } ] } From 47fb5c52799d936119639e757c2259f414946c3a Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Tue, 10 Jan 2017 18:19:22 -0800 Subject: [PATCH 145/192] Remove 'meta.type.parameters.cs' name which causes tuple element names inside type arguments to be colored incorrectly --- syntaxes/csharp.tmLanguage | 2 -- syntaxes/csharp.tmLanguage.yml | 1 - test/syntaxes/type-names.test.syntax.ts | 45 +++++++++++++++++++------ 3 files changed, 35 insertions(+), 13 deletions(-) diff --git a/syntaxes/csharp.tmLanguage b/syntaxes/csharp.tmLanguage index e56e7aeb64..d3c135acff 100644 --- a/syntaxes/csharp.tmLanguage +++ b/syntaxes/csharp.tmLanguage @@ -5634,8 +5634,6 @@ type-arguments - name - meta.type.parameters.cs begin < beginCaptures diff --git a/syntaxes/csharp.tmLanguage.yml b/syntaxes/csharp.tmLanguage.yml index 1e1cb27938..94556c4eef 100644 --- a/syntaxes/csharp.tmLanguage.yml +++ b/syntaxes/csharp.tmLanguage.yml @@ -2191,7 +2191,6 @@ repository: match: '[_[:alpha:]][_[:alnum:]]*' type-arguments: - name: meta.type.parameters.cs begin: '<' beginCaptures: '0': { name: punctuation.definition.typeparameters.begin.cs } diff --git a/test/syntaxes/type-names.test.syntax.ts b/test/syntaxes/type-names.test.syntax.ts index 55c80b2afb..9ee157b971 
100644 --- a/test/syntaxes/type-names.test.syntax.ts +++ b/test/syntaxes/type-names.test.syntax.ts @@ -11,7 +11,6 @@ describe("Grammar", () => { describe("Type names", () => { it("built-in type - object", () => { - const input = Input.InClass(`object x;`); const tokens = tokenize(input); @@ -22,7 +21,6 @@ describe("Grammar", () => { }); it("qualified name - System.Object", () => { - const input = Input.InClass(`System.Object x;`); const tokens = tokenize(input); @@ -35,7 +33,6 @@ describe("Grammar", () => { }); it("globally-qualified name - global::System.Object", () => { - const input = Input.InClass(`global::System.Object x;`); const tokens = tokenize(input); @@ -50,7 +47,6 @@ describe("Grammar", () => { }); it("tuple type - (int, int)", () => { - const input = Input.InClass(`(int, int) x;`); const tokens = tokenize(input); @@ -64,8 +60,23 @@ describe("Grammar", () => { Token.Punctuation.Semicolon]); }); - it("generic type - List", () => { + it("tuple type with element names - (int i, int j)", () => { + const input = Input.InClass(`(int i, int j) x;`); + const tokens = tokenize(input); + tokens.should.deep.equal([ + Token.Punctuation.OpenParen, + Token.PrimitiveType.Int, + Token.Identifiers.TupleElementName("i"), + Token.Punctuation.Comma, + Token.PrimitiveType.Int, + Token.Identifiers.TupleElementName("j"), + Token.Punctuation.CloseParen, + Token.Identifiers.FieldName("x"), + Token.Punctuation.Semicolon]); + }); + + it("generic type - List", () => { const input = Input.InClass(`List x;`); const tokens = tokenize(input); @@ -79,7 +90,6 @@ describe("Grammar", () => { }); it("generic type with tuple - List<(int, int)>", () => { - const input = Input.InClass(`List<(int, int)> x;`); const tokens = tokenize(input); @@ -96,8 +106,26 @@ describe("Grammar", () => { Token.Punctuation.Semicolon]); }); - it("generic type with multiple parameters - Dictionary", () => { + it("generic type with tuple with element names - List<(int i, int j)>", () => { + const input = 
Input.InClass(`List<(int i, int j)> x;`); + const tokens = tokenize(input); + + tokens.should.deep.equal([ + Token.Type("List"), + Token.Punctuation.TypeParameters.Begin, + Token.Punctuation.OpenParen, + Token.PrimitiveType.Int, + Token.Identifiers.TupleElementName("i"), + Token.Punctuation.Comma, + Token.PrimitiveType.Int, + Token.Identifiers.TupleElementName("j"), + Token.Punctuation.CloseParen, + Token.Punctuation.TypeParameters.End, + Token.Identifiers.FieldName("x"), + Token.Punctuation.Semicolon]); + }); + it("generic type with multiple parameters - Dictionary", () => { const input = Input.InClass(`Dictionary x;`); const tokens = tokenize(input); @@ -113,7 +141,6 @@ describe("Grammar", () => { }); it("qualified generic type - System.Collections.Generic.List", () => { - const input = Input.InClass(`System.Collections.Generic.List x;`); const tokens = tokenize(input); @@ -133,7 +160,6 @@ describe("Grammar", () => { }); it("generic type with nested type - List.Enumerator", () => { - const input = Input.InClass(`List.Enumerator x;`); const tokens = tokenize(input); @@ -149,7 +175,6 @@ describe("Grammar", () => { }); it("nullable type - int?", () => { - const input = Input.InClass(`int? x;`); const tokens = tokenize(input); From f62e38e6bf42cd49bac0e87f1838cdea0a143f52 Mon Sep 17 00:00:00 2001 From: Gregg Miskelly Date: Fri, 13 Jan 2017 15:29:51 -0800 Subject: [PATCH 146/192] Update README.md to mention Python (#1121) Python 2.7 is required on Windows to install dependencies. I am assuming it is required on other platforms too, but since npm itself requires it on Linux, and I don't have a spare Mac to verify, I can't say for sure. --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 8a4c193f33..b19a4c852d 100644 --- a/README.md +++ b/README.md @@ -53,6 +53,7 @@ The C# extension now supports basic debugging capabilities! 
See http://aka.ms/vs First install: * Node.js (newer than 4.3.1) * Npm (newer 2.14.12) +* Python 2.7 In case you get a *node-gyp* error [follow the instrutions here](https://github.com/nodejs/node-gyp/blob/master/README.md) to fix it. The *vscode-textmate* package pulls in a native node dependency and those instructions will set up the node build tool which deals with those. From 064fd7113fc4338db87207eb378db1a958aecd98 Mon Sep 17 00:00:00 2001 From: Andrew Wang Date: Wed, 18 Jan 2017 11:27:35 -0800 Subject: [PATCH 147/192] Simplifying remoteProcessPicker (#1123) * Renaming LaunchConfigurations To LaunchBrowserPlatformOptions. * Simplifying retrieving pipeTransport for attach The launch.json configuration is passed in via the args parameter. There is no need to read the launch.json file and find the correct configuration. The old method would only grab the first launch json name that matched. This change allows duplicate launch names but still grabs the correct pipeTransport. * Removing dead code --- src/features/processPicker.ts | 39 ++++------------------------------- 1 file changed, 4 insertions(+), 35 deletions(-) diff --git a/src/features/processPicker.ts b/src/features/processPicker.ts index fcf9c385bd..ebdbb5ac25 100644 --- a/src/features/processPicker.ts +++ b/src/features/processPicker.ts @@ -61,43 +61,20 @@ export class RemoteAttachPicker { return Promise.reject(new Error("Name not defined in current configuration.")); } - // Build path for launch.json to find pipeTransport - const vscodeFolder: string = path.join(vscode.workspace.rootPath, '.vscode'); - let launchJsonPath: string = path.join(vscodeFolder, 'launch.json'); - - // Read launch.json - let json: any = JSON.parse(fs.readFileSync(launchJsonPath).toString()); - - // Find correct pipeTransport via selected name - let config; - let configIdx: number; - for (configIdx = 0; configIdx < json.configurations.length; ++configIdx) { - if (json.configurations[configIdx].name === name) { - config = 
json.configurations[configIdx]; - break; - } - } - - if (configIdx == json.configurations.length) { - // Name not found in list of given configurations. - return Promise.reject(new Error(name + " could not be found in configurations.")); - } - - if (!config.pipeTransport || !config.pipeTransport.debuggerPath) { + if (!args.pipeTransport || !args.pipeTransport.debuggerPath) { // Missing PipeTransport and debuggerPath, prompt if user wanted to just do local attach. return Promise.reject(new Error("Configuration \"" + name + "\" in launch.json does not have a " + "pipeTransport argument with debuggerPath for pickRemoteProcess. Use pickProcess for local attach.")); } else { - let pipeProgram = config.pipeTransport.pipeProgram; - let pipeArgs = config.pipeTransport.pipeArgs; - let platformSpecificPipeTransportOptions = RemoteAttachPicker.getPlatformSpecificPipeTransportOptions(config); + let pipeProgram = args.pipeTransport.pipeProgram; + let pipeArgs = args.pipeTransport.pipeArgs; + let platformSpecificPipeTransportOptions = RemoteAttachPicker.getPlatformSpecificPipeTransportOptions(args); if (platformSpecificPipeTransportOptions) { pipeProgram = platformSpecificPipeTransportOptions.pipeProgram || pipeProgram; pipeArgs = platformSpecificPipeTransportOptions.pipeArgs || pipeArgs; } - let argList = RemoteAttachPicker.createArgumentList(pipeArgs); let pipeCmd: string = `"${pipeProgram}" ${argList}`; return RemoteAttachPicker.getRemoteOSAndProcesses(pipeCmd).then(processes => { @@ -170,14 +147,6 @@ export class RemoteAttachPicker { } }); } - - public static getRemoteProcesses(pipeCmd: string, os: string): Promise { - const psCommand = os === 'darwin' ? 
RemoteAttachPicker.osxPsCommand : RemoteAttachPicker.linuxPsCommand; - - return execChildProcessAndOutputErrorToChannel(`${pipeCmd} ${psCommand}`, null, RemoteAttachPicker._channel).then(output => { - return sortProcessEntries(PsOutputParser.parseProcessFromPs(output), os); - }); - } } class Process { From 84b3a3b204940acf077acf0bf82a26f202101922 Mon Sep 17 00:00:00 2001 From: Andrew Wang Date: Wed, 18 Jan 2017 14:00:53 -0800 Subject: [PATCH 148/192] Linesplit and ohmyzshfix (#1132) * Renaming LaunchConfigurations To LaunchBrowserPlatformOptions. * Simplifying retrieving pipeTransport for attach The launch.json configuration is passed in via the args parameter. There is no need to read the launch.json file and find the correct configuration. The old method would only grab the first launch json name that matched. This change allows duplicate launch names but still grabs the correct pipeTransport. * Removing dead code * Fixes for OhMyZsh and Line Splitting Line splitting is dependant on program used. E.g. plink or ssh, so line splitting for the processes given is handling both \r\n and just \n. OhMyZsh fails with the current command. Fix is to add extra [] around existing []. * Fixing command to work for sh and adding error msg If a user decides to launch the remoteProcessPicker and clicks outside of the process picker window, it will try to attach to the process id 'null'. Adding in a new error message. * Simplifying command --- src/features/processPicker.ts | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/features/processPicker.ts b/src/features/processPicker.ts index ebdbb5ac25..0467b0700a 100644 --- a/src/features/processPicker.ts +++ b/src/features/processPicker.ts @@ -84,7 +84,7 @@ export class RemoteAttachPicker { placeHolder: "Select the process to attach to" }; return vscode.window.showQuickPick(processes, attachPickOptions).then(item => { - return item ? item.id : null; + return item ? 
item.id : Promise.reject(new Error("Could not find a process id to attach.")); }); }); } @@ -119,13 +119,13 @@ export class RemoteAttachPicker { public static getRemoteOSAndProcesses(pipeCmd: string): Promise { // Commands to get OS and processes - const command = `uname && if [ $(uname) == "Linux" ] ; then ${RemoteAttachPicker.linuxPsCommand} ; elif [ $(uname) == "Darwin" ] ; ` + - `then ${RemoteAttachPicker.osxPsCommand}; fi`; + const command = `bash -c 'uname && if [ $(uname) == "Linux" ] ; then ${RemoteAttachPicker.linuxPsCommand} ; elif [ $(uname) == "Darwin" ] ; ` + + `then ${RemoteAttachPicker.osxPsCommand}; fi'`; return execChildProcessAndOutputErrorToChannel(`${pipeCmd} "${command}"`, null, RemoteAttachPicker._channel).then(output => { // OS will be on first line // Processess will follow if listed - let lines = output.split(os.EOL); + let lines = output.split(/\r?\n/); if (lines.length == 0) { return Promise.reject(new Error("Pipe transport failed to get OS and processes.")); From 7224ed4e4b67152ebeba63a3e81972d909b1d328 Mon Sep 17 00:00:00 2001 From: Andrew Wang Date: Fri, 20 Jan 2017 15:54:51 -0800 Subject: [PATCH 149/192] Removing unused imports in ProcessPicker.ts (#1137) --- src/features/processPicker.ts | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/features/processPicker.ts b/src/features/processPicker.ts index 0467b0700a..bd99d40b2c 100644 --- a/src/features/processPicker.ts +++ b/src/features/processPicker.ts @@ -6,8 +6,6 @@ import * as os from 'os'; import * as vscode from 'vscode'; import * as child_process from 'child_process'; -import * as fs from 'fs'; -import * as path from 'path'; export interface AttachItem extends vscode.QuickPickItem { id: string; From 23326b8c7ad811a71be905780df1569a43c1d754 Mon Sep 17 00:00:00 2001 From: Gregg Miskelly Date: Fri, 20 Jan 2017 16:41:08 -0800 Subject: [PATCH 150/192] Update the debugger to 1-7-0 (#1136) This updates the debugger to 1-7-0. 
This version of the debugger contains a fix for #1107 and #1105. --- package.json | 50 +++++++++++++++++++++++++------------------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/package.json b/package.json index 9581f6b680..014293dfce 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "name": "csharp", "publisher": "ms-vscode", - "version": "1.6.2", + "version": "1.7.0-beta1", "description": "C# for Visual Studio Code (powered by OmniSharp).", "displayName": "C#", "author": "Microsoft Corporation", @@ -153,8 +153,8 @@ }, { "description": ".NET Core Debugger (Windows / x64)", - "url": "https://vsdebugger.azureedge.net/coreclr-debug-1-6-3/coreclr-debug-win7-x64.zip", - "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-6-3/coreclr-debug-win7-x64.zip", + "url": "https://vsdebugger.azureedge.net/coreclr-debug-1-7-0/coreclr-debug-win7-x64.zip", + "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-7-0/coreclr-debug-win7-x64.zip", "installPath": ".debugger", "runtimeIds": [ "win7-x64" @@ -162,8 +162,8 @@ }, { "description": ".NET Core Debugger (macOS / x64)", - "url": "https://vsdebugger.azureedge.net/coreclr-debug-1-6-3/coreclr-debug-osx.10.11-x64.zip", - "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-6-3/coreclr-debug-osx.10.11-x64.zip", + "url": "https://vsdebugger.azureedge.net/coreclr-debug-1-7-0/coreclr-debug-osx.10.11-x64.zip", + "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-7-0/coreclr-debug-osx.10.11-x64.zip", "installPath": ".debugger", "runtimeIds": [ "osx.10.11-x64" @@ -175,8 +175,8 @@ }, { "description": ".NET Core Debugger (CentOS / x64)", - "url": "https://vsdebugger.azureedge.net/coreclr-debug-1-6-3/coreclr-debug-centos.7-x64.zip", - "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-6-3/coreclr-debug-centos.7-x64.zip", + "url": 
"https://vsdebugger.azureedge.net/coreclr-debug-1-7-0/coreclr-debug-centos.7-x64.zip", + "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-7-0/coreclr-debug-centos.7-x64.zip", "installPath": ".debugger", "runtimeIds": [ "centos.7-x64" @@ -188,8 +188,8 @@ }, { "description": ".NET Core Debugger (Debian / x64)", - "url": "https://vsdebugger.azureedge.net/coreclr-debug-1-6-3/coreclr-debug-debian.8-x64.zip", - "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-6-3/coreclr-debug-debian.8-x64.zip", + "url": "https://vsdebugger.azureedge.net/coreclr-debug-1-7-0/coreclr-debug-debian.8-x64.zip", + "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-7-0/coreclr-debug-debian.8-x64.zip", "installPath": ".debugger", "runtimeIds": [ "debian.8-x64" @@ -201,8 +201,8 @@ }, { "description": ".NET Core Debugger (Fedora 23 / x64)", - "url": "https://vsdebugger.azureedge.net/coreclr-debug-1-6-3/coreclr-debug-fedora.23-x64.zip", - "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-6-3/coreclr-debug-fedora.23-x64.zip", + "url": "https://vsdebugger.azureedge.net/coreclr-debug-1-7-0/coreclr-debug-fedora.23-x64.zip", + "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-7-0/coreclr-debug-fedora.23-x64.zip", "installPath": ".debugger", "runtimeIds": [ "fedora.23-x64" @@ -214,8 +214,8 @@ }, { "description": ".NET Core Debugger (Fedora 24 / x64)", - "url": "https://vsdebugger.azureedge.net/coreclr-debug-1-6-3/coreclr-debug-fedora.24-x64.zip", - "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-6-3/coreclr-debug-fedora.24-x64.zip", + "url": "https://vsdebugger.azureedge.net/coreclr-debug-1-7-0/coreclr-debug-fedora.24-x64.zip", + "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-7-0/coreclr-debug-fedora.24-x64.zip", "installPath": ".debugger", "runtimeIds": [ "fedora.24-x64" @@ -227,8 +227,8 @@ }, { "description": ".NET Core Debugger (OpenSUSE 
13 / x64)", - "url": "https://vsdebugger.azureedge.net/coreclr-debug-1-6-3/coreclr-debug-opensuse.13.2-x64.zip", - "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-6-3/coreclr-debug-opensuse.13.2-x64.zip", + "url": "https://vsdebugger.azureedge.net/coreclr-debug-1-7-0/coreclr-debug-opensuse.13.2-x64.zip", + "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-7-0/coreclr-debug-opensuse.13.2-x64.zip", "installPath": ".debugger", "runtimeIds": [ "opensuse.13.2-x64" @@ -240,8 +240,8 @@ }, { "description": ".NET Core Debugger (OpenSUSE 42 / x64)", - "url": "https://vsdebugger.azureedge.net/coreclr-debug-1-6-3/coreclr-debug-opensuse.42.1-x64.zip", - "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-6-3/coreclr-debug-opensuse.42.1-x64.zip", + "url": "https://vsdebugger.azureedge.net/coreclr-debug-1-7-0/coreclr-debug-opensuse.42.1-x64.zip", + "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-7-0/coreclr-debug-opensuse.42.1-x64.zip", "installPath": ".debugger", "runtimeIds": [ "opensuse.42.1-x64" @@ -253,8 +253,8 @@ }, { "description": ".NET Core Debugger (RHEL / x64)", - "url": "https://vsdebugger.azureedge.net/coreclr-debug-1-6-3/coreclr-debug-rhel.7.2-x64.zip", - "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-6-3/coreclr-debug-rhel.7.2-x64.zip", + "url": "https://vsdebugger.azureedge.net/coreclr-debug-1-7-0/coreclr-debug-rhel.7.2-x64.zip", + "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-7-0/coreclr-debug-rhel.7.2-x64.zip", "installPath": ".debugger", "runtimeIds": [ "rhel.7-x64" @@ -266,8 +266,8 @@ }, { "description": ".NET Core Debugger (Ubuntu 14.04 / x64)", - "url": "https://vsdebugger.azureedge.net/coreclr-debug-1-6-3/coreclr-debug-ubuntu.14.04-x64.zip", - "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-6-3/coreclr-debug-ubuntu.14.04-x64.zip", + "url": 
"https://vsdebugger.azureedge.net/coreclr-debug-1-7-0/coreclr-debug-ubuntu.14.04-x64.zip", + "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-7-0/coreclr-debug-ubuntu.14.04-x64.zip", "installPath": ".debugger", "runtimeIds": [ "ubuntu.14.04-x64" @@ -279,8 +279,8 @@ }, { "description": ".NET Core Debugger (Ubuntu 16.04 / x64)", - "url": "https://vsdebugger.azureedge.net/coreclr-debug-1-6-3/coreclr-debug-ubuntu.16.04-x64.zip", - "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-6-3/coreclr-debug-ubuntu.16.04-x64.zip", + "url": "https://vsdebugger.azureedge.net/coreclr-debug-1-7-0/coreclr-debug-ubuntu.16.04-x64.zip", + "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-7-0/coreclr-debug-ubuntu.16.04-x64.zip", "installPath": ".debugger", "runtimeIds": [ "ubuntu.16.04-x64" @@ -292,8 +292,8 @@ }, { "description": ".NET Core Debugger (Ubuntu 16.10 / x64)", - "url": "https://vsdebugger.azureedge.net/coreclr-debug-1-6-3/coreclr-debug-ubuntu.16.10-x64.zip", - "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-6-3/coreclr-debug-ubuntu.16.10-x64.zip", + "url": "https://vsdebugger.azureedge.net/coreclr-debug-1-7-0/coreclr-debug-ubuntu.16.10-x64.zip", + "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-7-0/coreclr-debug-ubuntu.16.10-x64.zip", "installPath": ".debugger", "runtimeIds": [ "ubuntu.16.10-x64" From da8cee209b11c46516ea5422b06f0532adceca8e Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Tue, 24 Jan 2017 12:58:17 -0800 Subject: [PATCH 151/192] Update typing for VSCode tasks --- typings/vscode-tasks.d.ts | 60 +++++++++++++++++++++++++++++++-------- 1 file changed, 48 insertions(+), 12 deletions(-) diff --git a/typings/vscode-tasks.d.ts b/typings/vscode-tasks.d.ts index 25c76eaf61..a4a4245f7d 100644 --- a/typings/vscode-tasks.d.ts +++ b/typings/vscode-tasks.d.ts @@ -45,6 +45,13 @@ declare module "vscode-tasks" { */ isShellCommand?: boolean; + /** + * 
Specifies whether a global command is watching the filesystem. A task.json + * file can either contain a global isWatching property or a tasks property + * but not both. + */ + isWatching?: boolean; + /** * The command options used when the command is executed. Can be omitted. */ @@ -79,7 +86,7 @@ declare module "vscode-tasks" { * prefix (e.g. /t: for msbuild). This property can be used to control such * a prefix. */ - taskSelector?:string; + taskSelector?: string; /** * The problem matcher to be used if a global command is executed (e.g. no tasks @@ -110,7 +117,7 @@ declare module "vscode-tasks" { * The environment of the executed program or shell. If omitted * the parent process' environment is used. */ - env?: { [key:string]:string; }; + env?: { [key: string]: string; }; } /** @@ -132,13 +139,18 @@ declare module "vscode-tasks" { /** * Whether this task maps to the default build command. */ - isBuildCommand?:boolean; + isBuildCommand?: boolean; /** * Whether this task maps to the default test command. */ isTestCommand?: boolean; + /** + * Whether the executed command is kept alive and is watching the file system. + */ + isWatching?: boolean; + /** * Controls whether the output view of the running tasks is brought to front or not. * See BaseTaskConfiguration#showOutput for details. @@ -209,6 +221,35 @@ declare module "vscode-tasks" { * problems spread over multiple lines. */ pattern?: string | ProblemPattern | ProblemPattern[]; + + /** + * Additional information used to detect when a background task (like a watching task in Gulp) + * is active. + */ + watching?: WatchingMatcher; + } + + /** + * A description to track the start and end of a watching task. + */ + export interface WatchingMatcher { + + /** + * If set to true the watcher is in active mode when the task + * starts. This is equals of issuing a line that matches the + * beginPattern. + */ + activeOnStart?: boolean; + + /** + * If matched in the output the start of a watching task is signaled. 
+ */ + beginsPattern?: string; + + /** + * If matched in the output the end of a watching task is signaled. + */ + endsPattern?: string; } export interface ProblemPattern { @@ -221,9 +262,8 @@ declare module "vscode-tasks" { /** * The match group index of the filename. - * If omitted 1 is used. */ - file?: number; + file: number; /** * The match group index of the problems's location. Valid location @@ -234,15 +274,12 @@ declare module "vscode-tasks" { /** * The match group index of the problem's line in the source file. - * - * Defaults to 2. + * Can only be omitted if location is specified. */ line?: number; /** * The match group index of the problem's column in the source file. - * - * Defaults to 3. */ column?: number; @@ -276,10 +313,9 @@ declare module "vscode-tasks" { code?: number; /** - * The match group index of the message. If omitted it defaults - * to 4 if location is specified. Otherwise it defaults to 5. + * The match group index of the message. Defaults to 0. */ - message?: number; + message: number; /** * Specifies if the last pattern in a multi line problem matcher should From a5e4602838fd0c1f06f6f8681d4dff4ff7943e8d Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Tue, 24 Jan 2017 13:20:47 -0800 Subject: [PATCH 152/192] Be tolerant of tasks.json files with no 'tasks' node or with an os-specific node --- src/assets.ts | 40 ++++++++++++++++++++++++++++++++++------ 1 file changed, 34 insertions(+), 6 deletions(-) diff --git a/src/assets.ts b/src/assets.ts index 27f782e321..9edea2fdb5 100644 --- a/src/assets.ts +++ b/src/assets.ts @@ -346,6 +346,35 @@ function getOperations(generator: AssetGenerator) { getLaunchOperations(generator.launchJsonPath, operations)); } +function getBuildTasks(tasksConfiguration: tasks.TaskConfiguration) { + let result: tasks.TaskDescription[] = []; + + function findBuildTask(tasksDescriptions: tasks.TaskDescription[]) { + if (tasksDescriptions) { + const buildTask = tasksDescriptions.find(td => td.taskName === 
'build'); + if (buildTask !== undefined) { + result.push(buildTask); + } + } + } + + findBuildTask(tasksConfiguration.tasks); + + if (tasksConfiguration.windows) { + findBuildTask(tasksConfiguration.windows.tasks); + } + + if (tasksConfiguration.osx) { + findBuildTask(tasksConfiguration.osx.tasks); + } + + if (tasksConfiguration.linux) { + findBuildTask(tasksConfiguration.linux.tasks); + } + + return result; +} + function getBuildOperations(tasksJsonPath: string) { return new Promise((resolve, reject) => { fs.exists(tasksJsonPath, exists => { @@ -356,19 +385,18 @@ function getBuildOperations(tasksJsonPath: string) { } const text = buffer.toString(); - - let buildTask: tasks.TaskDescription; + let tasksConfiguration: tasks.TaskConfiguration; try { - const tasksJson: tasks.TaskConfiguration = tolerantParse(text); - buildTask = tasksJson.tasks.find(td => td.taskName === 'build'); + tasksConfiguration = tolerantParse(text); } catch (error) { vscode.window.showErrorMessage(`Failed to parse tasks.json file`); - buildTask = undefined; } - resolve({ updateTasksJson: (buildTask === undefined) }); + let buildTasks = getBuildTasks(tasksConfiguration); + + resolve({ updateTasksJson: (buildTasks.length > 0) }); }); } else { From 52ed4424d4e5ef97c79913f9d56f0d2a3d08d7f9 Mon Sep 17 00:00:00 2001 From: filipw Date: Tue, 24 Jan 2017 22:24:10 +0100 Subject: [PATCH 153/192] map namespace to CompletionItemKind.Module --- src/features/completionItemProvider.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/src/features/completionItemProvider.ts b/src/features/completionItemProvider.ts index f0cdfd47b2..551d027824 100644 --- a/src/features/completionItemProvider.ts +++ b/src/features/completionItemProvider.ts @@ -89,3 +89,4 @@ _kinds['Class'] = CompletionItemKind.Class; _kinds['Field'] = CompletionItemKind.Field; _kinds['EventField'] = CompletionItemKind.File; _kinds['Method'] = CompletionItemKind.Method; +_kinds['Namespace'] = CompletionItemKind.Module; \ No newline at end of file 
From f0ee015c01b25f642a1127f8dce39b545b776419 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Tue, 24 Jan 2017 14:02:06 -0800 Subject: [PATCH 154/192] Map more completion items to kinds --- src/features/completionItemProvider.ts | 29 +++++++++++++++++++------- 1 file changed, 21 insertions(+), 8 deletions(-) diff --git a/src/features/completionItemProvider.ts b/src/features/completionItemProvider.ts index 551d027824..855c66963d 100644 --- a/src/features/completionItemProvider.ts +++ b/src/features/completionItemProvider.ts @@ -79,14 +79,27 @@ export default class OmniSharpCompletionItemProvider extends AbstractSupport imp } const _kinds: { [kind: string]: CompletionItemKind; } = Object.create(null); -_kinds['Variable'] = CompletionItemKind.Variable; -_kinds['Struct'] = CompletionItemKind.Interface; -_kinds['Interface'] = CompletionItemKind.Interface; -_kinds['Enum'] = CompletionItemKind.Enum; -_kinds['EnumMember'] = CompletionItemKind.Property; -_kinds['Property'] = CompletionItemKind.Property; + +// types _kinds['Class'] = CompletionItemKind.Class; +_kinds['Delegate'] = CompletionItemKind.Class; // need a better option for this. +_kinds['Enum'] = CompletionItemKind.Enum; +_kinds['Interface'] = CompletionItemKind.Interface; +_kinds['Struct'] = CompletionItemKind.Class; // need a better option for this. + +// variables +_kinds['Local'] = CompletionItemKind.Variable; +_kinds['Parameter'] = CompletionItemKind.Variable; +_kinds['RangeVariable'] = CompletionItemKind.Variable; + +// members +_kinds['EnumMember'] = CompletionItemKind.Property; // need a better option for this. +_kinds['Event'] = CompletionItemKind.Field; // need a better option for this. 
_kinds['Field'] = CompletionItemKind.Field; -_kinds['EventField'] = CompletionItemKind.File; +_kinds['Property'] = CompletionItemKind.Property; _kinds['Method'] = CompletionItemKind.Method; -_kinds['Namespace'] = CompletionItemKind.Module; \ No newline at end of file + +// other stuff +_kinds['Label'] = CompletionItemKind.Unit; // need a better option for this. +_kinds['Keyword'] = CompletionItemKind.Keyword; +_kinds['Namespace'] = CompletionItemKind.Module; From 7b364d5f39bff8cd78d9bd8d4cebf014f4d5f82c Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Tue, 24 Jan 2017 14:55:09 -0800 Subject: [PATCH 155/192] Don't search for 'build' tasks if there was a parse error --- src/assets.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/assets.ts b/src/assets.ts index 9edea2fdb5..2bc21b5d01 100644 --- a/src/assets.ts +++ b/src/assets.ts @@ -346,7 +346,7 @@ function getOperations(generator: AssetGenerator) { getLaunchOperations(generator.launchJsonPath, operations)); } -function getBuildTasks(tasksConfiguration: tasks.TaskConfiguration) { +function getBuildTasks(tasksConfiguration: tasks.TaskConfiguration): tasks.TaskDescription[] { let result: tasks.TaskDescription[] = []; function findBuildTask(tasksDescriptions: tasks.TaskDescription[]) { @@ -392,6 +392,7 @@ function getBuildOperations(tasksJsonPath: string) { } catch (error) { vscode.window.showErrorMessage(`Failed to parse tasks.json file`); + return resolve({ updateTasksJson: false }); } let buildTasks = getBuildTasks(tasksConfiguration); From e5a25544bc822dde1b2ce8450a2ee656ca8a8751 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Tue, 24 Jan 2017 15:00:36 -0800 Subject: [PATCH 156/192] Fix Travis script --- .travis.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index c3a4ce12c8..36a2c807a5 100644 --- a/.travis.yml +++ b/.travis.yml @@ -25,7 +25,6 @@ install: script: - npm test --silent - - npm run test-syntax deploy: provider: releases From 
a496cb2588ad6176f1e3f9b5832023e32c043333 Mon Sep 17 00:00:00 2001 From: Gregg Miskelly Date: Wed, 25 Jan 2017 12:09:20 -0800 Subject: [PATCH 157/192] Freshen up Debugger.md (#1148) Debugger.md had a bunch of content that was showing its age. So I took a pass over it to make it a bit more fresh. --- debugger.md | 40 +++++++++++----------------------------- 1 file changed, 11 insertions(+), 29 deletions(-) diff --git a/debugger.md b/debugger.md index 15f59b487b..802ee1dad9 100644 --- a/debugger.md +++ b/debugger.md @@ -14,7 +14,7 @@ File bugs and feature requests [here](https://github.com/OmniSharp/omnisharp-vsc ###First Time setup ##### 1: Get Visual Studio Code -Install Visual Studio Code (VSC). Pick the latest VSC version from here: https://code.visualstudio.com Make sure it is at least 0.10.10. +Install Visual Studio Code (VSC). Pick the latest VSC version from here: https://code.visualstudio.com Make sure it is at least 1.5. If you are not sure what version you have, you can see your version of VS Code: @@ -29,7 +29,7 @@ Install the .NET Core command line tools (CLI) by following the installation par ##### 3: Install C# Extension for VS Code Open the command palette in VS Code (F1) and type "ext install C#" to trigger the installation of the extension. VS Code will show a message that the extension has been installed and it will restart. -If you have previously installed the C# extension, make sure that you have version 1.2 or newer. You can check this by opening the command palette (F1) and running 'Extensions: Show Installed Extensions'. +If you have previously installed the C# extension, make sure that you have a recent version. You can check this by opening the command palette (F1) and running 'Extensions: Show Installed Extensions'. ##### 4: Wait for download of platform-specific files The first time that C# code is opened in VS Code, the extension will download the platform-specific files needed for debugging and editing. 
Debugging and editor features will not work until these steps finish. @@ -47,20 +47,11 @@ You can start from scratch by creating an empty project with `dotnet new`: dotnet new dotnet restore -You can also find some example projects on https://github.com/aspnet/cli-samples +If you want a web project (ASP.NET project) pass `-t web`. For web projects, makes sure to run `bower install` before running so that they can restore assets. ##### 2: Open the directory in VS Code Go to File->Open and open the directory in Visual Studio Code. If this is the first time that the C# extension has been activated, it will now download additional platform-specific dependencies. -**Troubleshooting 'Error while installing .NET Core Debugger':** If the debugger is failing to download its platform-specific dependencies, first verify that you have the 1.0.0-preview2-003121 or newer build of the .NET CLI installed, and it is functioning. You can check this by starting a bash/command prompt and running 'dotnet --info'. - -If the CLI is installed, here are a few additional suggestions: - -* If clicking on 'View Log' doesn't show a log this means that running the 'dotnet --info' command failed. If it succeeds in bash/command prompt, but fails from VS Code, this likely means that your computer once had an older build of .NET CLI installed, and there are still remnants of it which cause VS Code and other processes besides bash to use the older version instead of the current version. You can resolve this issue by uninstalling the .NET Core CLI, and reinstalling the version you want (see below for macOS). -* If 'dotnet restore' is failing, make sure you have an internet connection to nuget.org, and make sure that if additional NuGet.Config files are being used, they have valid content. The log will indicate what NuGet.Config files were used. Try removing the files other than the one coming from the extension itself. 
- -MacOS .NET CLI Reinstall Instructions: macOS doesn't have uninstall for pkg files (see [known issue](https://github.com/dotnet/core/blob/master/cli/known-issues.md#uninstallingreinstalling-the-pkg-on-os-x)), one option is to remove the dotnet cli directory with `sudo rm -rf /usr/local/share/dotnet` and then install the pkg again. - ##### 3: Add VS Code configuration files to the workspace VS Code needs to be configured so it understands how to build your project and debug it. For this there are two files which need to be added -- .vscode/tasks.json and .vscode/launch.json. @@ -86,24 +77,15 @@ If your code has multiple projects or you would rather generate these files by h ... "program": "${workspaceRoot}/MyLaunchingProject/bin/Debug/netcoreapp1.0/MyLaunchingProject.dll", -##### 4: Windows Only: Enable Portable PDBs -In the future, this step will go away, but for now you need to [change the project.json to use portable PDBs](https://github.com/OmniSharp/omnisharp-vscode/wiki/Portable-PDBs#net-cli-projects-projectjson). - -##### 5: Pick your debug configuration - -The default launch.json offers several different launch configurations depending on what kind of app you are building -- one for command line, one for web, and one for attaching to a running process. - -To configure which configuration you want, bring up the Debug view by clicking on the Debugging icon in the View Bar on the side of VS Code. - -![Debug view icon](https://raw.githubusercontent.com/wiki/OmniSharp/omnisharp-vscode/images/debugging_debugicon.png) - -Now open the configuration drop down from the top and select the one you want. - -![Debug launch configuration drop down](https://raw.githubusercontent.com/wiki/OmniSharp/omnisharp-vscode/images/debug-launch-configurations.png) +##### 4: Start debugging +Your project is now all set. Set a breakpoint or two where you want to stop, click the debugger play button (or hit F5) and you are off. 
###Debugging Code compiled on another computer -* If the target binary is built on Linux / OSX, dotnet CLI will produce portable pdbs by default so no action is necessary. -* On Windows, you will need to take additional steps to build [portable PDBs](https://github.com/OmniSharp/omnisharp-vscode/wiki/Portable-PDBs#how-to-generate-portable-pdbs). +If your code was built on a different computer from where you would like to run in there are a few things to keep in mind -- + +* **Source Maps**: Unless your local source code is at exactly the same path as where the code was originally built you will need to add a [sourceFileMap](#source-file-map) to launch.json. +* **Portable PDBs**: If the code was built on Windows, it might have been built using Windows PDBs instead of portable PDBs, but the C# extension only supports portable PDBs. See the [portable PDB documentation](https://github.com/OmniSharp/omnisharp-vscode/wiki/Portable-PDBs#how-to-generate-portable-pdbs) for more information. +* **Debug vs. Release**: It is much easier to debug code which has been compiled in the 'Debug' configuration. So unless the issue you are looking at only reproduces with optimizations, it is much better to use Debug bits. If you do need to debug optimized code, you will need to disable [justMyCode](#just-my-code) in launch.json. 
####More things to configure In launch.json #####Just My Code @@ -126,7 +108,7 @@ You can optionally configure a file by file mapping by providing map following t #####Symbol Path You can optionally provide paths to symbols following this schema: - "symbolPath":"[ \"/Volumes/symbols\"]" + "symbolPath": [ "/Volumes/symbols" ] #####Environment variables Environment variables may be passed to your program using this schema: From 120212f47dd9ec79a55a9c7bc9cea702586fbd7e Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Wed, 25 Jan 2017 12:36:38 -0800 Subject: [PATCH 158/192] Use environment variables to test bitness of Windows --- src/platform.ts | 29 ++++++++--------------------- 1 file changed, 8 insertions(+), 21 deletions(-) diff --git a/src/platform.ts b/src/platform.ts index 3720cfd056..006657c359 100644 --- a/src/platform.ts +++ b/src/platform.ts @@ -167,27 +167,14 @@ export class PlatformInformation { } private static GetWindowsArchitecture(): Promise { - return util.execChildProcess('wmic os get osarchitecture') - .then(architecture => { - if (architecture) { - let archArray: string[] = architecture.split(os.EOL); - if (archArray.length >= 2) { - let arch = archArray[1].trim(); - - // Note: This string can be localized. So, we'll just check to see if it contains 32 or 64. 
- if (arch.indexOf('64') >= 0) { - return "x86_64"; - } - else if (arch.indexOf('32') >= 0) { - return "x86"; - } - } - } - - return unknown; - }).catch((error) => { - return unknown; - }); + return new Promise((resolve, reject) => { + if (process.env.PROCESSOR_ARCHITECTURE === 'x86' && process.env.PROCESSOR_ARCHITEW6432 === undefined) { + resolve('x86'); + } + else { + resolve('x86_64'); + } + }); } private static GetUnixArchitecture(): Promise { From acef04b37be7739966ab81732005742129d4e298 Mon Sep 17 00:00:00 2001 From: Thaina Yu Date: Fri, 27 Jan 2017 22:57:23 +0700 Subject: [PATCH 159/192] Update package.json --- package.json | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/package.json b/package.json index aa6f8f220f..2f3312eda1 100644 --- a/package.json +++ b/package.json @@ -394,14 +394,14 @@ } ], "jsonValidation": [ - { - "fileMatch": "project.json", - "url": "http://json.schemastore.org/project" - }, - { - "fileMatch": "omnisharp.json", - "url": "http://json.schemastore.org/omnisharp" - } + { + "fileMatch": "project.json", + "url": "http://json.schemastore.org/project" + }, + { + "fileMatch": "omnisharp.json", + "url": "http://json.schemastore.org/omnisharp" + } ], "commands": [ { From 7ed2e006140c0a4b44ac800e330bca5b6aabf005 Mon Sep 17 00:00:00 2001 From: Thaina Yu Date: Fri, 27 Jan 2017 23:01:09 +0700 Subject: [PATCH 160/192] Update package.json --- package.json | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/package.json b/package.json index 2f3312eda1..b4b74b3320 100644 --- a/package.json +++ b/package.json @@ -394,14 +394,14 @@ } ], "jsonValidation": [ - { - "fileMatch": "project.json", - "url": "http://json.schemastore.org/project" - }, - { - "fileMatch": "omnisharp.json", - "url": "http://json.schemastore.org/omnisharp" - } + { + "fileMatch": "project.json", + "url": "http://json.schemastore.org/project" + }, + { + "fileMatch": "omnisharp.json", + "url": 
"http://json.schemastore.org/omnisharp" + } ], "commands": [ { From c006ba58ec446aa86ba57e04002a69fabcaa8b51 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Fri, 27 Jan 2017 10:12:46 -0800 Subject: [PATCH 161/192] Don't add unnecessary noise to the OmniSharp log --- src/omnisharp/server.ts | 34 ++++++++++++++++++++++++++++------ 1 file changed, 28 insertions(+), 6 deletions(-) diff --git a/src/omnisharp/server.ts b/src/omnisharp/server.ts index cf4f886a27..f367f2c450 100644 --- a/src/omnisharp/server.ts +++ b/src/omnisharp/server.ts @@ -13,6 +13,7 @@ import { DelayTracker } from './delayTracker'; import { LaunchTarget, findLaunchTargets } from './launcher'; import { Request, RequestQueueCollection } from './requestQueue'; import TelemetryReporter from 'vscode-extension-telemetry'; +import * as os from 'os'; import * as path from 'path'; import * as protocol from './protocol'; import * as vscode from 'vscode'; @@ -539,7 +540,7 @@ export class OmniSharpServer { if (packet.Event === 'log') { const entry = <{ LogLevel: string; Name: string; Message: string; }>packet.Body; this._logOutput(entry.LogLevel, entry.Name, entry.Message); - } + } else { // fwd all other events this._fireEvent(packet.Event, packet.Body); @@ -569,13 +570,34 @@ export class OmniSharpServer { return id; } + private static getLogLevelPrefix(logLevel: string) { + switch (logLevel) { + case "TRACE": return "trce"; + case "DEBUG": return "dbug"; + case "INFORMATION": return "info"; + case "WARNING": return "warn"; + case "ERROR": return "fail"; + case "CRITICAL": return "crit"; + default: throw new Error(`Unknown log level value: ${logLevel}`); + } + } + + private _isFilterableOutput(logLevel: string, name: string, message: string) { + // filter messages like: /codecheck: 200 339ms + const timing200Pattern = /^\/[\/\w]+: 200 \d+ms/; + + return logLevel === "INFORMATION" + && name === "OmniSharp.Middleware.LoggingMiddleware" + && timing200Pattern.test(message); + } + private _logOutput(logLevel: 
string, name: string, message: string) { - const timing200Pattern = /^\[INFORMATION:OmniSharp.Middleware.LoggingMiddleware\] \/[\/\w]+: 200 \d+ms/; + if (this._debugMode || !this._isFilterableOutput(logLevel, name, message)) { + let output = `[${OmniSharpServer.getLogLevelPrefix(logLevel)}]: ${name}${os.EOL}${message}`; + + const newLinePlusPadding = os.EOL + " "; + output = output.replace(os.EOL, newLinePlusPadding); - const output = `[${logLevel}:${name}] ${message}`; - - // strip stuff like: /codecheck: 200 339ms - if (this._debugMode || !timing200Pattern.test(output)) { this._logger.appendLine(output); } } From 19d04e9600ec9f7a5e52a3eedfcfc103eda29602 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Mon, 30 Jan 2017 07:16:12 -0800 Subject: [PATCH 162/192] Update to latest C# TextMate grammar --- syntaxes/csharp.tmLanguage | 421 ++++++++++++++++++++++++++++++------- 1 file changed, 343 insertions(+), 78 deletions(-) diff --git a/syntaxes/csharp.tmLanguage b/syntaxes/csharp.tmLanguage index d3c135acff..25a78b9484 100644 --- a/syntaxes/csharp.tmLanguage +++ b/syntaxes/csharp.tmLanguage @@ -141,7 +141,7 @@ - class-members + class-or-struct-members patterns @@ -161,76 +161,10 @@ include #type-declarations - - include - #event-declaration - - - include - #property-declaration - - - include - #indexer-declaration - include #field-declaration - - include - #variable-initializer - - - include - #constructor-declaration - - - include - #destructor-declaration - - - include - #operator-declaration - - - include - #conversion-operator-declaration - - - include - #method-declaration - - - include - #attribute-section - - - include - #punctuation-semicolon - - - - struct-members - - patterns - - - include - #preprocessor - - - include - #comment - - - include - #storage-modifier - - - include - #type-declarations - include #event-declaration @@ -243,10 +177,6 @@ include #indexer-declaration - - include - #field-declaration - include #variable-initializer @@ -449,10 
+379,6 @@ include #verbatim-interpolated-string - - include - #literal - include #this-or-base-expression @@ -513,10 +439,18 @@ include #cast-expression + + include + #literal + include #parenthesized-expression + + include + #tuple-deconstruction-assignment + include #initializer-expression @@ -920,7 +854,7 @@ include - #class-members + #class-or-struct-members @@ -1290,7 +1224,7 @@ include - #struct-members + #class-or-struct-members @@ -1485,7 +1419,7 @@ (?<tuple>\s*\((?:[^\(\)]|\g<tuple>)+\)) )\s+ (\g<identifier>)\s* # first field name -(?!=>|==)(?=,|;|=) +(?!=>|==)(?=,|;|=|$) beginCaptures 1 @@ -1526,6 +1460,10 @@ include #variable-initializer + + include + #class-or-struct-members + property-declaration @@ -1604,6 +1542,10 @@ include #variable-initializer + + include + #class-or-struct-members + indexer-declaration @@ -2944,6 +2886,36 @@ + + match + (?x) # match foreach (var (x, y) in ...) +(?:\b(var)\b\s*)? +(?<tuple>\((?:[^\(\)]|\g<tuple>)+\))\s+ +\b(in)\b + captures + + 1 + + name + keyword.other.var.cs + + 2 + + patterns + + + include + #tuple-declaration-deconstruction-element-list + + + + 3 + + name + keyword.control.loop.in.cs + + + include #expression @@ -3304,6 +3276,10 @@ include #local-variable-declaration + + include + #local-tuple-var-deconstruction + local-variable-declaration @@ -3444,6 +3420,224 @@ + local-tuple-var-deconstruction + + begin + (?x) # e.g. 
var (x, y) = GetPoint(); +(?:\b(var)\b\s*) +(?<tuple>\((?:[^\(\)]|\g<tuple>)+\))\s* +(?=;|=|\)) + beginCaptures + + 1 + + name + keyword.other.var.cs + + 2 + + patterns + + + include + #tuple-declaration-deconstruction-element-list + + + + + end + (?=;|\)) + patterns + + + include + #comment + + + include + #variable-initializer + + + + tuple-deconstruction-assignment + + match + (?x) +(?<tuple>\s*\((?:[^\(\)]|\g<tuple>)+\))\s* +(?!=>|==)(?==) + captures + + 1 + + patterns + + + include + #tuple-deconstruction-element-list + + + + + + tuple-declaration-deconstruction-element-list + + begin + \( + beginCaptures + + 0 + + name + punctuation.parenthesis.open.cs + + + end + \) + endCaptures + + 0 + + name + punctuation.parenthesis.close.cs + + + patterns + + + include + #comment + + + include + #tuple-declaration-deconstruction-element-list + + + include + #declaration-expression + + + include + #punctuation-comma + + + match + (?x) # e.g. x +\b([_[:alpha:]][_[:alnum:]]*)\b\s* +(?=[,)]) + captures + + 1 + + name + entity.name.variable.tuple-element.cs + + + + + + tuple-deconstruction-element-list + + begin + \( + beginCaptures + + 0 + + name + punctuation.parenthesis.open.cs + + + end + \) + endCaptures + + 0 + + name + punctuation.parenthesis.close.cs + + + patterns + + + include + #comment + + + include + #tuple-deconstruction-element-list + + + include + #declaration-expression + + + include + #punctuation-comma + + + match + (?x) # e.g. x +\b([_[:alpha:]][_[:alnum:]]*)\b\s* +(?=[,)]) + captures + + 1 + + name + variable.other.readwrite.cs + + + + + + declaration-expression + + match + (?x) # e.g. int x OR var x +(?: + \b(var)\b| + (?<type-name> + (?: + (?:(?<identifier>[_[:alpha:]][_[:alnum:]]*)\s*\:\:\s*)? # alias-qualification + (?<name-and-type-args> # identifier + type arguments (if any) + \g<identifier>\s* + (?<type-args>\s*<(?:[^<>]|\g<type-args>)+>\s*)? + ) + (?:\s*\.\s*\g<name-and-type-args>)* # Are there any more names being dotted into? 
+ (?:\s*\*\s*)* # pointer suffix? + (?:\s*\?\s*)? # nullable suffix? + (?:\s*\[(?:\s*,\s*)*\]\s*)* # array suffix? + )| + (?<tuple>\s*\((?:[^\(\)]|\g<tuple>)+\)) + ) +)\s+ +\b(\g<identifier>)\b\s* +(?=[,)]) + captures + + 1 + + name + keyword.other.var.cs + + 2 + + patterns + + + include + #type + + + + 7 + + name + entity.name.variable.tuple-element.cs + + + checked-unchecked-expression begin @@ -3701,6 +3895,10 @@ include #verbatim-string-literal + + include + #tuple-literal + boolean-literal @@ -3890,6 +4088,73 @@ match "" + tuple-literal + + begin + (\()(?=.*[:,]) + beginCaptures + + 1 + + name + punctuation.parenthesis.open.cs + + + end + \) + endCaptures + + 0 + + name + punctuation.parenthesis.close.cs + + + patterns + + + include + #comment + + + include + #tuple-literal-element + + + include + #punctuation-comma + + + + tuple-literal-element + + begin + (?x) +(?:([_[:alpha:]][_[:alnum:]]*)\s*(:)\s*)? +(?![,)]) + beginCaptures + + 0 + + name + entity.name.variable.tuple-element.cs + + 1 + + name + punctuation.separator.colon.cs + + + end + (?=[,)]) + patterns + + + include + #expression + + + expression-operators patterns From 427da0cfe25cc35d49fdd7af74cddfcafd52a9cb Mon Sep 17 00:00:00 2001 From: Khalid Aziz Date: Tue, 31 Jan 2017 12:44:56 -0600 Subject: [PATCH 163/192] Added support for Zorin OS 12, an Ubuntu-based linux distribution (#1160) * Added support for Zorin OS to platform.ts * Updated README to show Zorin OSZorin OS support * Added break to switch in platform.ts --- README.md | 2 +- src/platform.ts | 5 +++++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index c72b7ca84e..daad8e6795 100644 --- a/README.md +++ b/README.md @@ -34,7 +34,7 @@ See our [change log](https://github.com/OmniSharp/omnisharp-vscode/blob/master/C * Windows (64-bit only) * macOS * Ubuntu 14.04 / Linux Mint 17 / Linux Mint 18 / Elementary OS 0.3 - * Ubuntu 16.04 / Elementary OS 0.4 / Arch + * Ubuntu 16.04 / Elementary OS 0.4 / Arch / Zorin 
OS 12 * Ubuntu 16.10 * Debian 8.2 * CentOS 7.1 / Oracle Linux 7 diff --git a/src/platform.ts b/src/platform.ts index 006657c359..f19289f1c1 100644 --- a/src/platform.ts +++ b/src/platform.ts @@ -265,6 +265,11 @@ export class PlatformInformation { const ubuntu_16_10 = 'ubuntu.16.10-x64'; switch (distributionName) { + case 'Zorin OS': + if (distributionVersion === "12") { + return ubuntu_16_04; + } + break; case 'ubuntu': if (distributionVersion === "14.04") { // This also works for Linux Mint From 263f61a7a3e2f2dec6c6cd6d7b7b62514ee6a59f Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Tue, 31 Jan 2017 11:47:12 -0800 Subject: [PATCH 164/192] Asset generator should not offer to update tasks.json if there are build tasks This fixes a logic error in the asset generator that causes the prompt to generate assets to appear even if the assets already exist. --- src/assets.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/assets.ts b/src/assets.ts index 2bc21b5d01..00e1516784 100644 --- a/src/assets.ts +++ b/src/assets.ts @@ -397,7 +397,7 @@ function getBuildOperations(tasksJsonPath: string) { let buildTasks = getBuildTasks(tasksConfiguration); - resolve({ updateTasksJson: (buildTasks.length > 0) }); + resolve({ updateTasksJson: buildTasks.length === 0 }); }); } else { From 862d3e21c265abe1afdfef6dd8ecd663eee0a65f Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Tue, 31 Jan 2017 07:28:38 -0800 Subject: [PATCH 165/192] v1.7.0-beta1 -> v1.7.0-beta2 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 2ac3bcbdd5..eb8f0466f8 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "name": "csharp", "publisher": "ms-vscode", - "version": "1.7.0-beta1", + "version": "1.7.0-beta2", "description": "C# for Visual Studio Code (powered by OmniSharp).", "displayName": "C#", "author": "Microsoft Corporation", From 7d38293ebd768207af7493fc7fec94d7cdfc16f9 Mon Sep 17 00:00:00 2001 From: 
Dustin Campbell Date: Tue, 31 Jan 2017 11:16:59 -0800 Subject: [PATCH 166/192] Update Mono and OmniSharp depedencies --- package.json | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/package.json b/package.json index eb8f0466f8..9d09586e19 100644 --- a/package.json +++ b/package.json @@ -66,7 +66,7 @@ "runtimeDependencies": [ { "description": "Mono Runtime (Linux / x86)", - "url": "https://omnisharpdownload.blob.core.windows.net/ext/mono.linux-x86-4.6.1.zip", + "url": "https://omnisharpdownload.blob.core.windows.net/ext/mono.linux-x86-4.8.0.478.zip", "installPath": "./bin", "platforms": [ "linux" @@ -81,7 +81,7 @@ }, { "description": "Mono Runtime (Linux / x64)", - "url": "https://omnisharpdownload.blob.core.windows.net/ext/mono.linux-x86_64-4.6.1.zip", + "url": "https://omnisharpdownload.blob.core.windows.net/ext/mono.linux-x86_64-4.8.0.478.zip", "installPath": "./bin", "platforms": [ "linux" @@ -96,7 +96,7 @@ }, { "description": "Mono Runtime (macOS)", - "url": "https://omnisharpdownload.blob.core.windows.net/ext/mono.osx-4.6.1.zip", + "url": "https://omnisharpdownload.blob.core.windows.net/ext/mono.osx-4.8.0.478.zip", "installPath": "./bin", "platforms": [ "darwin" @@ -108,7 +108,7 @@ }, { "description": "Mono Framework Assemblies", - "url": "https://omnisharpdownload.blob.core.windows.net/ext/framework-4.6.1.zip", + "url": "https://omnisharpdownload.blob.core.windows.net/ext/framework-4.8.0.478.zip", "installPath": "./bin/framework", "platforms": [ "darwin", @@ -117,7 +117,7 @@ }, { "description": "OmniSharp (.NET 4.6 / x86)", - "url": "https://omnisharpdownload.blob.core.windows.net/ext/omnisharp-win-x86-1.9-beta22.zip", + "url": "https://omnisharpdownload.blob.core.windows.net/ext/omnisharp-win-x86-1.9-beta23.zip", "installPath": "./bin/omnisharp", "platforms": [ "win32" @@ -128,7 +128,7 @@ }, { "description": "OmniSharp (.NET 4.6 / x64)", - "url": 
"https://omnisharpdownload.blob.core.windows.net/ext/omnisharp-win-x64-1.9-beta22.zip", + "url": "https://omnisharpdownload.blob.core.windows.net/ext/omnisharp-win-x64-1.9-beta23.zip", "installPath": "./bin/omnisharp", "platforms": [ "win32" @@ -139,7 +139,7 @@ }, { "description": "OmniSharp (Mono 4.6)", - "url": "https://omnisharpdownload.blob.core.windows.net/ext/omnisharp-mono-1.9-beta22.zip", + "url": "https://omnisharpdownload.blob.core.windows.net/ext/omnisharp-mono-1.9-beta23.zip", "installPath": "./bin/omnisharp", "platforms": [ "darwin", From e010274c6bc078a10086023868be43659eaee098 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Tue, 31 Jan 2017 13:52:19 -0800 Subject: [PATCH 167/192] Updates to CHANGELOG.md for 1.7.0 so far --- CHANGELOG.md | 41 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1381055d35..d5fbc302d5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,44 @@ +## 1.7.0 _(Not Yet Released)_ + +#### Syntax Highlighting + +* Introduced a brand new TextMate grammar written from scratch that provides much more robust C# syntax highlighting. 
([#101](https://github.com/OmniSharp/omnisharp-vscode/issues/101), [#225](https://github.com/OmniSharp/omnisharp-vscode/issues/225), [#268](https://github.com/OmniSharp/omnisharp-vscode/issues/268), [#316](https://github.com/OmniSharp/omnisharp-vscode/issues/316), [#674](https://github.com/OmniSharp/omnisharp-vscode/issues/674), [#706](https://github.com/OmniSharp/omnisharp-vscode/issues/706), [#731](https://github.com/OmniSharp/omnisharp-vscode/issues/731), [#746](https://github.com/OmniSharp/omnisharp-vscode/issues/746), [#782](https://github.com/OmniSharp/omnisharp-vscode/issues/782), [#802](https://github.com/OmniSharp/omnisharp-vscode/issues/802), [#816](https://github.com/OmniSharp/omnisharp-vscode/issues/816), [#829](https://github.com/OmniSharp/omnisharp-vscode/issues/829), [#830](https://github.com/OmniSharp/omnisharp-vscode/issues/830), [#861](https://github.com/OmniSharp/omnisharp-vscode/issues/861), [#1078](https://github.com/OmniSharp/omnisharp-vscode/issues/1078), [#1084](https://github.com/OmniSharp/omnisharp-vscode/issues/1084), [#1086](https://github.com/OmniSharp/omnisharp-vscode/issues/1086), [#1091](https://github.com/OmniSharp/omnisharp-vscode/issues/1091), [#1096](https://github.com/OmniSharp/omnisharp-vscode/issues/1096), [#1097](https://github.com/OmniSharp/omnisharp-vscode/issues/1097), [#1106](https://github.com/OmniSharp/omnisharp-vscode/issues/1106), [#1115](https://github.com/OmniSharp/omnisharp-vscode/issues/1108)) +* The C# TextMate grammar has a new home! Issues and contributions are welcome at [https://github.com/dotnet/csharp-tmLanguage](https://github.com/dotnet/csharp-tmLanguage). + +### Project Support + +* Updated with the latest changes for .NET Core .csproj projects. ([omnisharp-roslyn#738](https://github.com/OmniSharp/omnisharp-roslyn/pull/738)) +* Properly handle .csproj projects in .sln files that were added via .NET CLI commands. 
([omnisharp-roslyn#741](https://github.com/OmniSharp/omnisharp-roslyn/pull/741)) +* Respect `nowarn` in project.json projects ([omnisharp#734](https://github.com/OmniSharp/omnisharp-roslyn/pull/734)) _(Contributed by [@filipw](https://github.com/filipw))_ + +### Debugging + +* Enable debugger support for Zorin OS 12 ([#1160](https://github.com/OmniSharp/omnisharp-vscode/issues/1160)) _(Contributed by [@mkaziz](https://github.com/mkaziz))_ + +### C# Scripting + +* Support resolving `#r` references from the GAC ([omnisharp-roslyn#721](https://github.com/OmniSharp/omnisharp-roslyn/pull/721)) _(Contributed by [@filipw](https://github.com/filipw))_ +* Include System.ValueTuple in C# scripts implicitly ([omnisharp-roslyn#722](https://github.com/OmniSharp/omnisharp-roslyn/pull/722)) _(Contributed by [@filipw](https://github.com/filipw))_ + +### Completion List + +* A namespace icon should be displayed for namespaces in the completion list. ([#1125](https://github.com/OmniSharp/omnisharp-vscode/issues/1124)) _(Contributed by [@filipw](https://github.com/filipw))_ +* Add icons for several symbol kinds in the completion list, fixing many symbols that incorrectly displayed a property "wrench" icon. ([#1145](https://github.com/OmniSharp/omnisharp-vscode/issues/1145)) + +### Other Updates and Fixes + +* Add schema validation for omnisharp.json files. ([#1082](https://github.com/OmniSharp/omnisharp-vscode/pull/1082)) _(Contributed by [@Thaina](https://github.com/Thaina))_ +* Fix running and debugging of tests defined in nested classes. ([#743](https://github.com/OmniSharp/omnisharp-vscode/issues/743), [#1151](https://github.com/OmniSharp/omnisharp-vscode/issues/1151)) +* Fix error when 'tasks.json' does not contain a 'tasks' node, or contains os-specific 'tasks' nodes. ([#1140](https://github.com/OmniSharp/omnisharp-vscode/issues/1140)) +* Better detection of Windows architecture (x86 or x64) when determining extension dependencies to download. 
The detection logic now uses well-known environment variables rather than launching 'wmic'. ([#1110](https://github.com/OmniSharp/omnisharp-vscode/issues/1110), [#1125](https://github.com/OmniSharp/omnisharp-vscode/issues/1125)) +* Improvements to the OmniSharp Log ([#1155](https://github.com/OmniSharp/omnisharp-vscode/pull/1155)) + +### Known Issues + +* Running and debugging of tests are not supported in .csproj-based .NET Core projects. However, there will still be clickable "run test" and "debug test" indicators above test methods. ([#1100](https://github.com/OmniSharp/omnisharp-vscode/issues/1100)) +* Auto-restore for NuGet packages is not yet implemented. If you change a project file and need to restore packages, you should do so by running `dotnet restore` with the .NET CLI. Once packages are restored, you can force the C# extension to "see" the new references by restarting OmniSharp with the `OmniSharp: Restart OmniSharp` command. ([#770](https://github.com/OmniSharp/omnisharp-vscode/issues/770)) +* When opening a .csproj-based .NET Core project in VS Code, the C# extension will not activate until a C# file is opened in the editor. ([#1150](https://github.com/OmniSharp/omnisharp-vscode/issues/1150)) + ## 1.6.2 (December 24, 2016) * Fix performance issue when editing type names containing multiple generic type parameters. 
([#1088](https://github.com/OmniSharp/omnisharp-vscode/issues/1088), [#1086](https://github.com/OmniSharp/omnisharp-vscode/issues/1086)) From c063a3aa310ca99381e1be7b6e8438378e717bf6 Mon Sep 17 00:00:00 2001 From: filipw Date: Wed, 1 Feb 2017 16:50:22 +0100 Subject: [PATCH 168/192] pass loglevel to omnisharp --- src/omnisharp/server.ts | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/src/omnisharp/server.ts b/src/omnisharp/server.ts index f367f2c450..294070dd08 100644 --- a/src/omnisharp/server.ts +++ b/src/omnisharp/server.ts @@ -235,20 +235,17 @@ export class OmniSharpServer { const solutionPath = launchTarget.target; const cwd = path.dirname(solutionPath); + this._options = Options.Read(); + let args = [ '-s', solutionPath, '--hostPID', process.pid.toString(), '--stdio', 'DotNet:enablePackageRestore=false', - '--encoding', 'utf-8' + '--encoding', 'utf-8', + '-loglevel', this._options.loggingLevel ]; - this._options = Options.Read(); - - if (this._options.loggingLevel === 'verbose') { - args.push('-v'); - } - this._logger.appendLine(`Starting OmniSharp server at ${new Date().toLocaleString()}`); this._logger.increaseIndent(); this._logger.appendLine(`Target: ${solutionPath}`); From e714369b217f6fdec15d17554c9cd24c1f985418 Mon Sep 17 00:00:00 2001 From: filipw Date: Wed, 1 Feb 2017 16:50:36 +0100 Subject: [PATCH 169/192] changed default values for logginglevel --- package.json | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/package.json b/package.json index 9d09586e19..7da5eec5a7 100644 --- a/package.json +++ b/package.json @@ -356,10 +356,14 @@ }, "omnisharp.loggingLevel": { "type": "string", - "default": "default", + "default": "information", "enum": [ - "default", - "verbose" + "trace", + "debug", + "information", + "warning", + "error", + "critical" ], "description": "Specifies the level of logging output from the OmniSharp server." 
}, From 0ec1b4633b55b5cc3bcf20c0907006ab630b5267 Mon Sep 17 00:00:00 2001 From: filipw Date: Wed, 1 Feb 2017 21:43:44 +0100 Subject: [PATCH 170/192] do not break folks using the old "verbose" level --- src/omnisharp/options.ts | 7 ++++++- src/omnisharp/server.ts | 2 +- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/src/omnisharp/options.ts b/src/omnisharp/options.ts index 5cd72f0f3d..2f62130c51 100644 --- a/src/omnisharp/options.ts +++ b/src/omnisharp/options.ts @@ -33,7 +33,12 @@ export class Options { ? csharpConfig.get('omnisharpUsesMono') : omnisharpConfig.get('useMono'); - const loggingLevel = omnisharpConfig.get('loggingLevel'); + // support the legacy "verbose" level as "debug" + let loggingLevel = omnisharpConfig.get('loggingLevel'); + if (loggingLevel.toLowerCase() === 'verbose') { + loggingLevel = 'debug'; + } + const autoStart = omnisharpConfig.get('autoStart', true); const projectLoadTimeout = omnisharpConfig.get('projectLoadTimeout', 60); diff --git a/src/omnisharp/server.ts b/src/omnisharp/server.ts index 294070dd08..80174e7f42 100644 --- a/src/omnisharp/server.ts +++ b/src/omnisharp/server.ts @@ -243,7 +243,7 @@ export class OmniSharpServer { '--stdio', 'DotNet:enablePackageRestore=false', '--encoding', 'utf-8', - '-loglevel', this._options.loggingLevel + '--loglevel', this._options.loggingLevel ]; this._logger.appendLine(`Starting OmniSharp server at ${new Date().toLocaleString()}`); From 890360b7b24e58d8ccc1b41c8676f0b9b0047049 Mon Sep 17 00:00:00 2001 From: Rajkumar Janakiraman Date: Wed, 1 Feb 2017 14:10:32 -0800 Subject: [PATCH 171/192] Rajkumar42/wsl/pipefix (#1170) Adding sysnative to path if running in windows for remote pipe launch --- src/features/processPicker.ts | 59 +++++++++++++++++++++++------------ 1 file changed, 39 insertions(+), 20 deletions(-) diff --git a/src/features/processPicker.ts b/src/features/processPicker.ts index bd99d40b2c..d0604a0271 100644 --- a/src/features/processPicker.ts +++ 
b/src/features/processPicker.ts @@ -6,6 +6,7 @@ import * as os from 'os'; import * as vscode from 'vscode'; import * as child_process from 'child_process'; +import { PlatformInformation } from '../platform'; export interface AttachItem extends vscode.QuickPickItem { id: string; @@ -389,33 +390,51 @@ function execChildProcess(process: string, workingDirectory: string): Promise { + return PlatformInformation.GetCurrent().then(platformInfo => { + let env = process.env; + if (platformInfo.isWindows && platformInfo.architecture === "x86_64") { + let sysnative : String = process.env.WINDIR + "\\sysnative"; + env.Path = process.env.PATH + ";" + sysnative; + } + + return env; + }); +} + function execChildProcessAndOutputErrorToChannel(process: string, workingDirectory: string, channel: vscode.OutputChannel): Promise { - channel.appendLine(`Executing: ${process}`); + channel.appendLine(`Executing: ${process}`); + return new Promise((resolve, reject) => { - child_process.exec(process, { cwd: workingDirectory, maxBuffer: 500 * 1024 }, (error: Error, stdout: string, stderr: string) => { - let channelOutput = ""; - - if (stdout && stdout.length > 0) { - channelOutput.concat(stdout); - } + return GetSysNativePathIfNeeded().then(newEnv => { + child_process.exec(process, { cwd: workingDirectory, env: newEnv, maxBuffer: 500 * 1024 }, (error: Error, stdout: string, stderr: string) => { + let channelOutput = ""; + + if (stdout && stdout.length > 0) { + channelOutput.concat(stdout); + } - if (stderr && stderr.length > 0) { - channelOutput.concat(stderr); - } + if (stderr && stderr.length > 0) { + channelOutput.concat(stderr); + } - if (error) { - channelOutput.concat(error.message); - } + if (error) { + channelOutput.concat(error.message); + } - if (error || (stderr && stderr.length > 0)) { - channel.append(channelOutput); - channel.show(); - reject(new Error("See remote-attach output")); - return; - } + if (error || (stderr && stderr.length > 0)) { + channel.append(channelOutput); + 
channel.show(); + reject(new Error("See remote-attach output")); + return; + } - resolve(stdout); + resolve(stdout); + }); }); }); From c9a970cd73bf46b065f3c3a7b860eb143062f20b Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Fri, 3 Feb 2017 04:46:46 -0800 Subject: [PATCH 172/192] .NET: Restore Packages command should work with .NET Core .csproj projects --- package.json | 2 +- src/assets.ts | 8 ++------ src/features/commands.ts | 8 +++++--- src/omnisharp/protocol.ts | 31 +++++++++++++++++++++++++++++++ 4 files changed, 39 insertions(+), 10 deletions(-) diff --git a/package.json b/package.json index 9d09586e19..17fafc9513 100644 --- a/package.json +++ b/package.json @@ -421,7 +421,7 @@ { "command": "dotnet.restore", "title": "Restore Packages", - "category": "dotnet" + "category": ".NET" }, { "command": "csharp.downloadDebugger", diff --git a/src/assets.ts b/src/assets.ts index 00e1516784..fc84f874b3 100644 --- a/src/assets.ts +++ b/src/assets.ts @@ -95,7 +95,7 @@ export class AssetGenerator { this.hasProject = true; this.projectPath = path.dirname(targetMSBuildProject.Path); this.projectFilePath = targetMSBuildProject.Path; - this.targetFramework = findNetCoreAppTargetFramework(targetMSBuildProject).ShortName; + this.targetFramework = protocol.findNetCoreAppTargetFramework(targetMSBuildProject).ShortName; this.executableName = targetMSBuildProject.AssemblyName + ".dll"; this.configurationName = configurationName; return; @@ -284,15 +284,11 @@ export class AssetGenerator { } } -function findNetCoreAppTargetFramework(project: protocol.MSBuildProject): protocol.TargetFramework { - return project.TargetFrameworks.find(tf => tf.ShortName.startsWith('netcoreapp')); -} - function findExecutableMSBuildProjects(projects: protocol.MSBuildProject[]) { let result: protocol.MSBuildProject[] = []; projects.forEach(project => { - if (project.IsExe && findNetCoreAppTargetFramework(project) !== undefined) { + if (project.IsExe && protocol.findNetCoreAppTargetFramework(project) 
!== undefined) { result.push(project); } }); diff --git a/src/features/commands.ts b/src/features/commands.ts index 3c239446b2..b20584f061 100644 --- a/src/features/commands.ts +++ b/src/features/commands.ts @@ -83,7 +83,7 @@ interface Command { execute(): Thenable; } -function projectsToCommands(projects: protocol.DotNetProject[]): Promise[] { +function projectsToCommands(projects: protocol.ProjectDescriptor[]): Promise[] { return projects.map(project => { let projectDirectory = project.Path; @@ -117,11 +117,13 @@ export function dotnetRestoreAllProjects(server: OmniSharpServer) { return serverUtils.requestWorkspaceInformation(server).then(info => { - if (!info.DotNet || info.DotNet.Projects.length < 1) { + let projectDescriptors = protocol.getDotNetCoreProjectDescriptors(info); + + if (projectDescriptors.length === 0) { return Promise.reject("No .NET Core projects found"); } - let commandPromises = projectsToCommands(info.DotNet.Projects); + let commandPromises = projectsToCommands(projectDescriptors); return Promise.all(commandPromises).then(commands => { return vscode.window.showQuickPick(commands); diff --git a/src/omnisharp/protocol.ts b/src/omnisharp/protocol.ts index 351651c31e..c762a387de 100644 --- a/src/omnisharp/protocol.ts +++ b/src/omnisharp/protocol.ts @@ -5,6 +5,8 @@ 'use strict'; +import * as path from 'path'; + export module Requests { export const AddToProject = '/addtoproject'; export const AutoComplete = '/autocomplete'; @@ -509,4 +511,33 @@ export namespace V2 { Failure: string; Pass: boolean; } +} + +export function findNetCoreAppTargetFramework(project: MSBuildProject): TargetFramework { + return project.TargetFrameworks.find(tf => tf.ShortName.startsWith('netcoreapp')); +} + +export interface ProjectDescriptor { + Name: string; + Path: string; +} + +export function getDotNetCoreProjectDescriptors(info: WorkspaceInformationResponse): ProjectDescriptor[] { + let result = []; + + if (info.DotNet && info.DotNet.Projects.length > 0) { + for (let 
project of info.DotNet.Projects) { + result.push({ Name: project.Name, Path: project.Path }); + } + } + + if (info.MsBuild && info.MsBuild.Projects.length > 0) { + for (let project of info.MsBuild.Projects) { + if (findNetCoreAppTargetFramework(project) !== undefined) { + result.push({ Name: path.basename(project.Path), Path: project.Path }); + } + } + } + + return result; } \ No newline at end of file From 214c6cda5ae2cf682273fc55fc85bf3cf6a98316 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Fri, 3 Feb 2017 04:51:51 -0800 Subject: [PATCH 173/192] Also allow Restore Command to work for netstandard --- src/omnisharp/protocol.ts | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/omnisharp/protocol.ts b/src/omnisharp/protocol.ts index c762a387de..0570c7a2a5 100644 --- a/src/omnisharp/protocol.ts +++ b/src/omnisharp/protocol.ts @@ -517,6 +517,10 @@ export function findNetCoreAppTargetFramework(project: MSBuildProject): TargetFr return project.TargetFrameworks.find(tf => tf.ShortName.startsWith('netcoreapp')); } +export function findNetStandardTargetFramework(project: MSBuildProject): TargetFramework { + return project.TargetFrameworks.find(tf => tf.ShortName.startsWith('netstandard')); +} + export interface ProjectDescriptor { Name: string; Path: string; @@ -533,7 +537,8 @@ export function getDotNetCoreProjectDescriptors(info: WorkspaceInformationRespon if (info.MsBuild && info.MsBuild.Projects.length > 0) { for (let project of info.MsBuild.Projects) { - if (findNetCoreAppTargetFramework(project) !== undefined) { + if (findNetCoreAppTargetFramework(project) !== undefined || + findNetStandardTargetFramework(project) !== undefined) { result.push({ Name: path.basename(project.Path), Path: project.Path }); } } From e6eaccdc17a94d7fec55b3fe01708c74194acd5e Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Fri, 3 Feb 2017 06:11:30 -0800 Subject: [PATCH 174/192] Update OmniSharp to v1.9-beta24 --- package.json | 6 +++--- 1 file changed, 3 
insertions(+), 3 deletions(-) diff --git a/package.json b/package.json index 17fafc9513..315ec22f4e 100644 --- a/package.json +++ b/package.json @@ -117,7 +117,7 @@ }, { "description": "OmniSharp (.NET 4.6 / x86)", - "url": "https://omnisharpdownload.blob.core.windows.net/ext/omnisharp-win-x86-1.9-beta23.zip", + "url": "https://omnisharpdownload.blob.core.windows.net/ext/omnisharp-win-x86-1.9-beta24.zip", "installPath": "./bin/omnisharp", "platforms": [ "win32" @@ -128,7 +128,7 @@ }, { "description": "OmniSharp (.NET 4.6 / x64)", - "url": "https://omnisharpdownload.blob.core.windows.net/ext/omnisharp-win-x64-1.9-beta23.zip", + "url": "https://omnisharpdownload.blob.core.windows.net/ext/omnisharp-win-x64-1.9-beta24.zip", "installPath": "./bin/omnisharp", "platforms": [ "win32" @@ -139,7 +139,7 @@ }, { "description": "OmniSharp (Mono 4.6)", - "url": "https://omnisharpdownload.blob.core.windows.net/ext/omnisharp-mono-1.9-beta23.zip", + "url": "https://omnisharpdownload.blob.core.windows.net/ext/omnisharp-mono-1.9-beta24.zip", "installPath": "./bin/omnisharp", "platforms": [ "darwin", From d5ca4d6915a68f1c60686e8dddafe6146df78445 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Fri, 3 Feb 2017 06:27:37 -0800 Subject: [PATCH 175/192] v1.7.0-beta2 -> v1.7.0-beta3 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 2f862efd12..c03a81f335 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "name": "csharp", "publisher": "ms-vscode", - "version": "1.7.0-beta2", + "version": "1.7.0-beta3", "description": "C# for Visual Studio Code (powered by OmniSharp).", "displayName": "C#", "author": "Microsoft Corporation", From f6c68b958a1130658f993c9c00ab83b3949858fe Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Fri, 3 Feb 2017 06:33:34 -0800 Subject: [PATCH 176/192] Update changelog --- CHANGELOG.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 
d5fbc302d5..3218543f95 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,7 @@ * Updated with the latest changes for .NET Core .csproj projects. ([omnisharp-roslyn#738](https://github.com/OmniSharp/omnisharp-roslyn/pull/738)) * Properly handle .csproj projects in .sln files that were added via .NET CLI commands. ([omnisharp-roslyn#741](https://github.com/OmniSharp/omnisharp-roslyn/pull/741)) * Respect `nowarn` in project.json projects ([omnisharp#734](https://github.com/OmniSharp/omnisharp-roslyn/pull/734)) _(Contributed by [@filipw](https://github.com/filipw))_ +* Correctly update project when dotnet restore is performed on a .NET Core .csproj project. ([#1114](https://github.com/OmniSharp/omnisharp-vscode/issues/1114)) ### Debugging @@ -32,6 +33,8 @@ * Fix error when 'tasks.json' does not contain a 'tasks' node, or contains os-specific 'tasks' nodes. ([#1140](https://github.com/OmniSharp/omnisharp-vscode/issues/1140)) * Better detection of Windows architecture (x86 or x64) when determining extension dependencies to download. The detection logic now uses well-known environment variables rather than launching 'wmic'. ([#1110](https://github.com/OmniSharp/omnisharp-vscode/issues/1110), [#1125](https://github.com/OmniSharp/omnisharp-vscode/issues/1125)) * Improvements to the OmniSharp Log ([#1155](https://github.com/OmniSharp/omnisharp-vscode/pull/1155)) +* Add new values to the `omnisharp.logginglevel` option to allow more granualar control of OmniSharp logging. ([#993](https://github.com/OmniSharp/omnisharp-vscode/issues/993)) _(Contributed by [@filipw](https://github.com/filipw))_ +* Fix `dotnet restore` Visual Studio Code command to handle .csproj .NET Core projects. 
([#1175](https://github.com/OmniSharp/omnisharp-vscode/issues/1175)) ### Known Issues From d677a718ce12d92791ae05f044ebe9f97670c5e6 Mon Sep 17 00:00:00 2001 From: Rajkumar Janakiraman Date: Fri, 3 Feb 2017 14:44:00 -0800 Subject: [PATCH 177/192] Adding WSL support to changelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3218543f95..d1c4e6c295 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -15,6 +15,7 @@ ### Debugging * Enable debugger support for Zorin OS 12 ([#1160](https://github.com/OmniSharp/omnisharp-vscode/issues/1160)) _(Contributed by [@mkaziz](https://github.com/mkaziz))_ +* Added off-road support for [Windows Subsystem for Linux](https://blogs.msdn.microsoft.com/wsl/2016/04/22/windows-subsystem-for-linux-overview/) (NOTE: requires newer version of Windows than have been publicly released yet) ### C# Scripting From 4e1a2ace60aed422f6efec3856e95027567e3277 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Fri, 3 Feb 2017 15:22:51 -0800 Subject: [PATCH 178/192] Fix 'dotnet restore' when the extension tries to run it for a specific project --- src/features/commands.ts | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/src/features/commands.ts b/src/features/commands.ts index b20584f061..893ef52496 100644 --- a/src/features/commands.ts +++ b/src/features/commands.ts @@ -143,14 +143,16 @@ export function dotnetRestoreForProject(server: OmniSharpServer, fileName: strin return serverUtils.requestWorkspaceInformation(server).then(info => { - if (!info.DotNet || info.DotNet.Projects.length < 1) { + let projectDescriptors = protocol.getDotNetCoreProjectDescriptors(info); + + if (projectDescriptors.length === 0) { return Promise.reject("No .NET Core projects found"); } let directory = path.dirname(fileName); - for (let project of info.DotNet.Projects) { - if (project.Path === directory) { + for (let projectDescriptor of projectDescriptors) { + if (projectDescriptor.Path === directory) 
{ return dotnetRestore(directory, fileName); } } From 25a394a9667f44171c373eda5c68b45fad70c5f1 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Sun, 5 Feb 2017 08:37:25 -0800 Subject: [PATCH 179/192] Take latest C# TextMate grammar, fixing a few bugs --- syntaxes/csharp.tmLanguage | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/syntaxes/csharp.tmLanguage b/syntaxes/csharp.tmLanguage index 25a78b9484..97bc5501ea 100644 --- a/syntaxes/csharp.tmLanguage +++ b/syntaxes/csharp.tmLanguage @@ -1324,6 +1324,10 @@ include #punctuation-comma + + include + #preprocessor + generic-constraints @@ -2246,7 +2250,7 @@ end - (?=[,\);}]) + (?=[,\)\];}]) patterns @@ -4869,7 +4873,7 @@ match - \s+([_[:alpha:]][_[:alnum:]]*)\s*(?=[,\]]) + \s+([_[:alpha:]][_[:alnum:]]*)\s*(?=[=,\]]) captures 1 @@ -4935,7 +4939,7 @@ match - \s+([_[:alpha:]][_[:alnum:]]*)\s*(?=[,)]) + \b([_[:alpha:]][_[:alnum:]]*)\s*(?=[=,)]) captures 1 @@ -5532,7 +5536,7 @@ end - (?=\)|;) + (?=\)|;|}) patterns @@ -5575,7 +5579,7 @@ end - (?=\)|;) + (?=\)|;|}) patterns @@ -5607,7 +5611,7 @@ end - (?=\)|;) + (?=\)|;|}) patterns From 1e262ce4f11cdd1757ee533134d5f3f8c49cbfcd Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Mon, 6 Feb 2017 06:55:21 -0800 Subject: [PATCH 180/192] Update build task to launch "npm run compile" This uses the new tasks.json support in VS Code 1.9 to allow tasks to run different commands. So, now you should be able to build with Cmd+Shift+B in VS Code. 
--- .vscode/tasks.json | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/.vscode/tasks.json b/.vscode/tasks.json index 8474a73013..ee3eb7a123 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -1,8 +1,14 @@ { "version": "0.1.0", - "command": "gulp", - "isShellCommand": true, "tasks": [ + { + "taskName": "build", + "command": "npm", + "isShellCommand": true, + "args": ["run", "compile"], + "showOutput": "always", + "isBuildCommand": true + }, { "taskName": "test", "showOutput": "always", @@ -10,7 +16,9 @@ }, { "taskName": "tslint", - "args": [], + "command": "gulp", + "isShellCommand": true, + "args": ["tslint"], "problemMatcher": { "owner": "tslint", "fileLocation": [ From 37d44a62c2f6f378d6c17614e384282d560eba88 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Mon, 6 Feb 2017 07:55:31 -0800 Subject: [PATCH 181/192] Fix silly bug in dotnet restore --- src/features/commands.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/features/commands.ts b/src/features/commands.ts index 893ef52496..a2896b968b 100644 --- a/src/features/commands.ts +++ b/src/features/commands.ts @@ -152,7 +152,7 @@ export function dotnetRestoreForProject(server: OmniSharpServer, fileName: strin let directory = path.dirname(fileName); for (let projectDescriptor of projectDescriptors) { - if (projectDescriptor.Path === directory) { + if (projectDescriptor.Path === fileName) { return dotnetRestore(directory, fileName); } } From 48489392b9c14d9eb8007d4ad83caef73d77bc83 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Mon, 6 Feb 2017 09:00:12 -0800 Subject: [PATCH 182/192] Update 'test' task to print message describing how to run tests and delete gulp task for running tests Since we use VS Code's infrastructure for running tests (allowing us to access VS Code objects inside tests), the right way to run tests inside VS Code is to launch the debugger with the 'Launch Tests' configuration. 
At the command-line, the 'npm test' command runs tests. However, that downloads a build of VS Code to run the tests, which doesn't support being launched while other instances of VS Code or open. --- .vscode/tasks.json | 3 +++ gulpfile.js | 12 ------------ 2 files changed, 3 insertions(+), 12 deletions(-) diff --git a/.vscode/tasks.json b/.vscode/tasks.json index ee3eb7a123..19e4004654 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -11,7 +11,10 @@ }, { "taskName": "test", + "command": "echo", "showOutput": "always", + "isShellCommand": true, + "args": ["Run tests in VS Code by launching the debugg with the 'Launch Tests' configuration."], "isTestCommand": true }, { diff --git a/gulpfile.js b/gulpfile.js index 0eb960b7bf..b668c0dcb7 100644 --- a/gulpfile.js +++ b/gulpfile.js @@ -145,18 +145,6 @@ gulp.task('package:offline', ['clean'], () => { return promise; }); -/// Test Task -gulp.task('test', () => { - gulp.src('out/test/**/*.tests.js') - .pipe(mocha({ ui: "tdd" })) - .once('error', () => { - process.exit(1); - }) - .once('end', () => { - process.exit(); - }); -}); - /// Misc Tasks const allTypeScript = [ 'src/**/*.ts', From a881a97797bdefd20aa71933b6f6a3c68dd27f50 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Mon, 6 Feb 2017 09:42:31 -0800 Subject: [PATCH 183/192] Don't show message about projects having trouble loading when there are only warnings C# for VS Code is a little over-excited about showing the message that there were problems loading projects. Essentially, it will show it if there were warnings or errors. This changes that behavior to only show it if there are errors because it's perfectly reasonable for a project to contain warnings and not complain to the user every time they open it. 
--- src/features/status.ts | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/features/status.ts b/src/features/status.ts index a28109a325..718a31dc22 100644 --- a/src/features/status.ts +++ b/src/features/status.ts @@ -241,7 +241,10 @@ export function reportServerStatus(server: OmniSharpServer): vscode.Disposable{ message.Errors.forEach(error => asErrorMessage); message.Warnings.forEach(warning => asWarningMessage); appendLine(); - showMessageSoon(); + + if (message.Errors.length > 0) { + showMessageSoon(); + } } }); From b579ee4fa0b182bf0dd1b493340a1f742dcb7aa3 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Mon, 6 Feb 2017 10:03:54 -0800 Subject: [PATCH 184/192] v1.7.0-beta3 -> v1.7.0-beta4 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index c03a81f335..d5ec9f4b48 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "name": "csharp", "publisher": "ms-vscode", - "version": "1.7.0-beta3", + "version": "1.7.0-beta4", "description": "C# for Visual Studio Code (powered by OmniSharp).", "displayName": "C#", "author": "Microsoft Corporation", From 30dc04d2a8c2166e3c0f16247c837eebf75e8ff6 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Mon, 6 Feb 2017 10:05:09 -0800 Subject: [PATCH 185/192] Update to latest OmniSharp --- package.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/package.json b/package.json index d5ec9f4b48..7ae0522416 100644 --- a/package.json +++ b/package.json @@ -117,7 +117,7 @@ }, { "description": "OmniSharp (.NET 4.6 / x86)", - "url": "https://omnisharpdownload.blob.core.windows.net/ext/omnisharp-win-x86-1.9-beta24.zip", + "url": "https://omnisharpdownload.blob.core.windows.net/ext/omnisharp-win-x86-1.9-beta25.zip", "installPath": "./bin/omnisharp", "platforms": [ "win32" @@ -128,7 +128,7 @@ }, { "description": "OmniSharp (.NET 4.6 / x64)", - "url": 
"https://omnisharpdownload.blob.core.windows.net/ext/omnisharp-win-x64-1.9-beta24.zip", + "url": "https://omnisharpdownload.blob.core.windows.net/ext/omnisharp-win-x64-1.9-beta25.zip", "installPath": "./bin/omnisharp", "platforms": [ "win32" @@ -139,7 +139,7 @@ }, { "description": "OmniSharp (Mono 4.6)", - "url": "https://omnisharpdownload.blob.core.windows.net/ext/omnisharp-mono-1.9-beta24.zip", + "url": "https://omnisharpdownload.blob.core.windows.net/ext/omnisharp-mono-1.9-beta25.zip", "installPath": "./bin/omnisharp", "platforms": [ "darwin", From 2d3c1e8cd89742e1e3a313456b878b97cd763b46 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Mon, 6 Feb 2017 10:25:11 -0800 Subject: [PATCH 186/192] Update changelog --- CHANGELOG.md | 25 ++++++++++++++++++------- 1 file changed, 18 insertions(+), 7 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3218543f95..7cf17a2ab3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,18 +8,28 @@ ### Project Support * Updated with the latest changes for .NET Core .csproj projects. ([omnisharp-roslyn#738](https://github.com/OmniSharp/omnisharp-roslyn/pull/738)) -* Properly handle .csproj projects in .sln files that were added via .NET CLI commands. ([omnisharp-roslyn#741](https://github.com/OmniSharp/omnisharp-roslyn/pull/741)) -* Respect `nowarn` in project.json projects ([omnisharp#734](https://github.com/OmniSharp/omnisharp-roslyn/pull/734)) _(Contributed by [@filipw](https://github.com/filipw))_ +* Automatic package restore and out-of-date notifications implemented for .NET Core .csproj projects. ([#770](https://github.com/OmniSharp/omnisharp-vscode/issues/770)) * Correctly update project when dotnet restore is performed on a .NET Core .csproj project. ([#1114](https://github.com/OmniSharp/omnisharp-vscode/issues/1114)) +* Properly handle .csproj projects in .sln files that were added via .NET CLI commands. 
([omnisharp-roslyn#741](https://github.com/OmniSharp/omnisharp-roslyn/pull/741)) +* Fix `dotnet restore` Visual Studio Code command to execute for .csproj .NET Core projects. ([#1175](https://github.com/OmniSharp/omnisharp-vscode/issues/1175)) +* Respect `nowarn` in project.json projects. ([omnisharp#734](https://github.com/OmniSharp/omnisharp-roslyn/pull/734)) _(Contributed by [@filipw](https://github.com/filipw))_ +* Fix problem with project.json projects that wrap assemblies. ([#422](https://github.com/OmniSharp/omnisharp-vscode/issues/424)) ### Debugging -* Enable debugger support for Zorin OS 12 ([#1160](https://github.com/OmniSharp/omnisharp-vscode/issues/1160)) _(Contributed by [@mkaziz](https://github.com/mkaziz))_ +* Enable debugger support for Zorin OS 12. ([#1160](https://github.com/OmniSharp/omnisharp-vscode/issues/1160)) _(Contributed by [@mkaziz](https://github.com/mkaziz))_ ### C# Scripting -* Support resolving `#r` references from the GAC ([omnisharp-roslyn#721](https://github.com/OmniSharp/omnisharp-roslyn/pull/721)) _(Contributed by [@filipw](https://github.com/filipw))_ -* Include System.ValueTuple in C# scripts implicitly ([omnisharp-roslyn#722](https://github.com/OmniSharp/omnisharp-roslyn/pull/722)) _(Contributed by [@filipw](https://github.com/filipw))_ +* Support resolving `#r` references from the GAC. ([omnisharp-roslyn#721](https://github.com/OmniSharp/omnisharp-roslyn/pull/721)) _(Contributed by [@filipw](https://github.com/filipw))_ +* Include System.ValueTuple in C# scripts implicitly. ([omnisharp-roslyn#722](https://github.com/OmniSharp/omnisharp-roslyn/pull/722)) _(Contributed by [@filipw](https://github.com/filipw))_ + +### Code Actions + +* Fixed code actions that add files, such as "Move Type to File". ([#975](https://github.com/OmniSharp/omnisharp-vscode/issues/975)) +* Properly surface code actions that have "nested code actions". This allows "generate type" to work properly. 
([#302](https://github.com/OmniSharp/omnisharp-vscode/issues/302)) +* Don't display the Remove Unnecessary Usings code action unless it is relevant. ([omnisharp-roslyn#742](https://github.com/OmniSharp/omnisharp-roslyn/issues/742)) +* Don't show the Extract Interface refactoring as it requires a dialog that does not exist in VS Code. ([#925](https://github.com/OmniSharp/omnisharp-vscode/issues/925)) ### Completion List @@ -29,17 +39,18 @@ ### Other Updates and Fixes * Add schema validation for omnisharp.json files. ([#1082](https://github.com/OmniSharp/omnisharp-vscode/pull/1082)) _(Contributed by [@Thaina](https://github.com/Thaina))_ +* Add support for auto-closing and surrounding characters. ([#749](https://github.com/OmniSharp/omnisharp-vscode/issues/749), [#842](https://github.com/OmniSharp/omnisharp-vscode/issues/842)) _(Contributed by [@filipw](https://github.com/filipw))_ * Fix running and debugging of tests defined in nested classes. ([#743](https://github.com/OmniSharp/omnisharp-vscode/issues/743), [#1151](https://github.com/OmniSharp/omnisharp-vscode/issues/1151)) * Fix error when 'tasks.json' does not contain a 'tasks' node, or contains os-specific 'tasks' nodes. ([#1140](https://github.com/OmniSharp/omnisharp-vscode/issues/1140)) * Better detection of Windows architecture (x86 or x64) when determining extension dependencies to download. The detection logic now uses well-known environment variables rather than launching 'wmic'. ([#1110](https://github.com/OmniSharp/omnisharp-vscode/issues/1110), [#1125](https://github.com/OmniSharp/omnisharp-vscode/issues/1125)) * Improvements to the OmniSharp Log ([#1155](https://github.com/OmniSharp/omnisharp-vscode/pull/1155)) * Add new values to the `omnisharp.logginglevel` option to allow more granualar control of OmniSharp logging. 
([#993](https://github.com/OmniSharp/omnisharp-vscode/issues/993)) _(Contributed by [@filipw](https://github.com/filipw))_ -* Fix `dotnet restore` Visual Studio Code command to handle .csproj .NET Core projects. ([#1175](https://github.com/OmniSharp/omnisharp-vscode/issues/1175)) +* Don't display the "some projects have trouble loading" message if projects only contain warnings. ([#707](https://github.com/OmniSharp/omnisharp-vscode/issues/707)) +* Update Mono detection logic to succeed even if another shell is set as the default (e.g. zsh). ([#1031](https://github.com/OmniSharp/omnisharp-vscode/issues/1031)) ### Known Issues * Running and debugging of tests are not supported in .csproj-based .NET Core projects. However, there will still be clickable "run test" and "debug test" indicators above test methods. ([#1100](https://github.com/OmniSharp/omnisharp-vscode/issues/1100)) -* Auto-restore for NuGet packages is not yet implemented. If you change a project file and need to restore packages, you should do so by running `dotnet restore` with the .NET CLI. Once packages are restored, you can force the C# extension to "see" the new references by restarting OmniSharp with the `OmniSharp: Restart OmniSharp` command. ([#770](https://github.com/OmniSharp/omnisharp-vscode/issues/770)) * When opening a .csproj-based .NET Core project in VS Code, the C# extension will not activate until a C# file is opened in the editor. 
([#1150](https://github.com/OmniSharp/omnisharp-vscode/issues/1150)) ## 1.6.2 (December 24, 2016) From 17cc67f8fa408d83b792219316319bd2d8ba90f1 Mon Sep 17 00:00:00 2001 From: Rajkumar Janakiraman Date: Mon, 6 Feb 2017 11:27:21 -0800 Subject: [PATCH 187/192] Incorporating code review comments --- CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index d1c4e6c295..ba6e08408b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -16,6 +16,8 @@ * Enable debugger support for Zorin OS 12 ([#1160](https://github.com/OmniSharp/omnisharp-vscode/issues/1160)) _(Contributed by [@mkaziz](https://github.com/mkaziz))_ * Added off-road support for [Windows Subsystem for Linux](https://blogs.msdn.microsoft.com/wsl/2016/04/22/windows-subsystem-for-linux-overview/) (NOTE: requires newer version of Windows than have been publicly released yet) +* Fixed issue with debugger pause and multithreaded call stacks ([#1107](https://github.com/OmniSharp/omnisharp-vscode/issues/1107) and [#1105](https://github.com/OmniSharp/omnisharp-vscode/issues/1105)) + ### C# Scripting From 7c36c41af0c934470a1d4a49e593b13f9499201b Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Mon, 6 Feb 2017 11:29:15 -0800 Subject: [PATCH 188/192] Fix changelog typo --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7cf17a2ab3..825f8d0053 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,7 +13,7 @@ * Properly handle .csproj projects in .sln files that were added via .NET CLI commands. ([omnisharp-roslyn#741](https://github.com/OmniSharp/omnisharp-roslyn/pull/741)) * Fix `dotnet restore` Visual Studio Code command to execute for .csproj .NET Core projects. ([#1175](https://github.com/OmniSharp/omnisharp-vscode/issues/1175)) * Respect `nowarn` in project.json projects. 
([omnisharp#734](https://github.com/OmniSharp/omnisharp-roslyn/pull/734)) _(Contributed by [@filipw](https://github.com/filipw))_ -* Fix problem with project.json projects that wrap assemblies. ([#422](https://github.com/OmniSharp/omnisharp-vscode/issues/424)) +* Fix problem with project.json projects that wrap assemblies. ([#424](https://github.com/OmniSharp/omnisharp-vscode/issues/424)) ### Debugging From 819a271d75d25c8b2adad51cc0153a13b58e12e6 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Tue, 7 Feb 2017 12:43:07 -0800 Subject: [PATCH 189/192] Fix dotnet restore for project.json In fixing dotnet restore for .csproj, I unintentionally broke it for project.json. The problem here is that OmniSharp returns different information for `Path` with regard to project.json or .csproj. For project.json, it's the directory that the project.json lives in, and for .csproj, it's the actually file path. This change addresses that by adding a `FilePath` property to our `ProjectDescriptor` which represents the real file path and renamed `Path` to `Directory` to be a bit clearer. 
--- src/features/commands.ts | 30 ++++++++++++++---------------- src/omnisharp/protocol.ts | 15 ++++++++++++--- 2 files changed, 26 insertions(+), 19 deletions(-) diff --git a/src/features/commands.ts b/src/features/commands.ts index a2896b968b..4aa22400b9 100644 --- a/src/features/commands.ts +++ b/src/features/commands.ts @@ -85,7 +85,7 @@ interface Command { function projectsToCommands(projects: protocol.ProjectDescriptor[]): Promise[] { return projects.map(project => { - let projectDirectory = project.Path; + let projectDirectory = project.Directory; return new Promise((resolve, reject) => { fs.lstat(projectDirectory, (err, stats) => { @@ -98,7 +98,7 @@ function projectsToCommands(projects: protocol.ProjectDescriptor[]): Promise { - let projectDescriptors = protocol.getDotNetCoreProjectDescriptors(info); + let descriptors = protocol.getDotNetCoreProjectDescriptors(info); - if (projectDescriptors.length === 0) { + if (descriptors.length === 0) { return Promise.reject("No .NET Core projects found"); } - let commandPromises = projectsToCommands(projectDescriptors); + let commandPromises = projectsToCommands(descriptors); return Promise.all(commandPromises).then(commands => { return vscode.window.showQuickPick(commands); @@ -135,7 +135,7 @@ export function dotnetRestoreAllProjects(server: OmniSharpServer) { }); } -export function dotnetRestoreForProject(server: OmniSharpServer, fileName: string) { +export function dotnetRestoreForProject(server: OmniSharpServer, filePath: string) { if (!server.isRunning()) { return Promise.reject('OmniSharp server is not running.'); @@ -143,23 +143,21 @@ export function dotnetRestoreForProject(server: OmniSharpServer, fileName: strin return serverUtils.requestWorkspaceInformation(server).then(info => { - let projectDescriptors = protocol.getDotNetCoreProjectDescriptors(info); + let descriptors = protocol.getDotNetCoreProjectDescriptors(info); - if (projectDescriptors.length === 0) { + if (descriptors.length === 0) { return 
Promise.reject("No .NET Core projects found"); } - let directory = path.dirname(fileName); - - for (let projectDescriptor of projectDescriptors) { - if (projectDescriptor.Path === fileName) { - return dotnetRestore(directory, fileName); + for (let descriptor of descriptors) { + if (descriptor.FilePath === filePath) { + return dotnetRestore(descriptor.Directory, filePath); } } }); } -function dotnetRestore(cwd: string, fileName?: string) { +function dotnetRestore(cwd: string, filePath?: string) { return new Promise((resolve, reject) => { channel.clear(); channel.show(); @@ -167,8 +165,8 @@ function dotnetRestore(cwd: string, fileName?: string) { let cmd = 'dotnet'; let args = ['restore']; - if (fileName) { - args.push(fileName); + if (filePath) { + args.push(filePath); } let dotnet = cp.spawn(cmd, args, { cwd: cwd, env: process.env }); diff --git a/src/omnisharp/protocol.ts b/src/omnisharp/protocol.ts index 0570c7a2a5..4fb8865bfd 100644 --- a/src/omnisharp/protocol.ts +++ b/src/omnisharp/protocol.ts @@ -523,7 +523,8 @@ export function findNetStandardTargetFramework(project: MSBuildProject): TargetF export interface ProjectDescriptor { Name: string; - Path: string; + Directory: string; + FilePath: string; } export function getDotNetCoreProjectDescriptors(info: WorkspaceInformationResponse): ProjectDescriptor[] { @@ -531,7 +532,11 @@ export function getDotNetCoreProjectDescriptors(info: WorkspaceInformationRespon if (info.DotNet && info.DotNet.Projects.length > 0) { for (let project of info.DotNet.Projects) { - result.push({ Name: project.Name, Path: project.Path }); + result.push({ + Name: project.Name, + Directory: project.Path, + FilePath: path.join(project.Path, 'project.json') + }); } } @@ -539,7 +544,11 @@ export function getDotNetCoreProjectDescriptors(info: WorkspaceInformationRespon for (let project of info.MsBuild.Projects) { if (findNetCoreAppTargetFramework(project) !== undefined || findNetStandardTargetFramework(project) !== undefined) { - result.push({ 
Name: path.basename(project.Path), Path: project.Path }); + result.push({ + Name: path.basename(project.Path), + Directory: path.dirname(project.Path), + FilePath: project.Path + }); } } } From 0986378ec1ca7d36e64817db484ba6183930d9da Mon Sep 17 00:00:00 2001 From: Rajkumar Janakiraman Date: Tue, 7 Feb 2017 15:54:54 -0800 Subject: [PATCH 190/192] Updating the debugger to 1.7.1. (#1200) --- package.json | 48 ++++++++++++++++++++++++------------------------ 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/package.json b/package.json index 7ae0522416..43374b3731 100644 --- a/package.json +++ b/package.json @@ -148,8 +148,8 @@ }, { "description": ".NET Core Debugger (Windows / x64)", - "url": "https://vsdebugger.azureedge.net/coreclr-debug-1-7-0/coreclr-debug-win7-x64.zip", - "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-7-0/coreclr-debug-win7-x64.zip", + "url": "https://vsdebugger.azureedge.net/coreclr-debug-1-7-1/coreclr-debug-win7-x64.zip", + "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-7-1/coreclr-debug-win7-x64.zip", "installPath": ".debugger", "runtimeIds": [ "win7-x64" @@ -157,8 +157,8 @@ }, { "description": ".NET Core Debugger (macOS / x64)", - "url": "https://vsdebugger.azureedge.net/coreclr-debug-1-7-0/coreclr-debug-osx.10.11-x64.zip", - "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-7-0/coreclr-debug-osx.10.11-x64.zip", + "url": "https://vsdebugger.azureedge.net/coreclr-debug-1-7-1/coreclr-debug-osx.10.11-x64.zip", + "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-7-1/coreclr-debug-osx.10.11-x64.zip", "installPath": ".debugger", "runtimeIds": [ "osx.10.11-x64" @@ -170,8 +170,8 @@ }, { "description": ".NET Core Debugger (CentOS / x64)", - "url": "https://vsdebugger.azureedge.net/coreclr-debug-1-7-0/coreclr-debug-centos.7-x64.zip", - "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-7-0/coreclr-debug-centos.7-x64.zip", 
+ "url": "https://vsdebugger.azureedge.net/coreclr-debug-1-7-1/coreclr-debug-centos.7-x64.zip", + "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-7-1/coreclr-debug-centos.7-x64.zip", "installPath": ".debugger", "runtimeIds": [ "centos.7-x64" @@ -183,8 +183,8 @@ }, { "description": ".NET Core Debugger (Debian / x64)", - "url": "https://vsdebugger.azureedge.net/coreclr-debug-1-7-0/coreclr-debug-debian.8-x64.zip", - "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-7-0/coreclr-debug-debian.8-x64.zip", + "url": "https://vsdebugger.azureedge.net/coreclr-debug-1-7-1/coreclr-debug-debian.8-x64.zip", + "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-7-1/coreclr-debug-debian.8-x64.zip", "installPath": ".debugger", "runtimeIds": [ "debian.8-x64" @@ -196,8 +196,8 @@ }, { "description": ".NET Core Debugger (Fedora 23 / x64)", - "url": "https://vsdebugger.azureedge.net/coreclr-debug-1-7-0/coreclr-debug-fedora.23-x64.zip", - "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-7-0/coreclr-debug-fedora.23-x64.zip", + "url": "https://vsdebugger.azureedge.net/coreclr-debug-1-7-1/coreclr-debug-fedora.23-x64.zip", + "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-7-1/coreclr-debug-fedora.23-x64.zip", "installPath": ".debugger", "runtimeIds": [ "fedora.23-x64" @@ -209,8 +209,8 @@ }, { "description": ".NET Core Debugger (Fedora 24 / x64)", - "url": "https://vsdebugger.azureedge.net/coreclr-debug-1-7-0/coreclr-debug-fedora.24-x64.zip", - "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-7-0/coreclr-debug-fedora.24-x64.zip", + "url": "https://vsdebugger.azureedge.net/coreclr-debug-1-7-1/coreclr-debug-fedora.24-x64.zip", + "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-7-1/coreclr-debug-fedora.24-x64.zip", "installPath": ".debugger", "runtimeIds": [ "fedora.24-x64" @@ -222,8 +222,8 @@ }, { "description": ".NET Core Debugger 
(OpenSUSE 13 / x64)", - "url": "https://vsdebugger.azureedge.net/coreclr-debug-1-7-0/coreclr-debug-opensuse.13.2-x64.zip", - "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-7-0/coreclr-debug-opensuse.13.2-x64.zip", + "url": "https://vsdebugger.azureedge.net/coreclr-debug-1-7-1/coreclr-debug-opensuse.13.2-x64.zip", + "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-7-1/coreclr-debug-opensuse.13.2-x64.zip", "installPath": ".debugger", "runtimeIds": [ "opensuse.13.2-x64" @@ -235,8 +235,8 @@ }, { "description": ".NET Core Debugger (OpenSUSE 42 / x64)", - "url": "https://vsdebugger.azureedge.net/coreclr-debug-1-7-0/coreclr-debug-opensuse.42.1-x64.zip", - "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-7-0/coreclr-debug-opensuse.42.1-x64.zip", + "url": "https://vsdebugger.azureedge.net/coreclr-debug-1-7-1/coreclr-debug-opensuse.42.1-x64.zip", + "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-7-1/coreclr-debug-opensuse.42.1-x64.zip", "installPath": ".debugger", "runtimeIds": [ "opensuse.42.1-x64" @@ -248,8 +248,8 @@ }, { "description": ".NET Core Debugger (RHEL / x64)", - "url": "https://vsdebugger.azureedge.net/coreclr-debug-1-7-0/coreclr-debug-rhel.7.2-x64.zip", - "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-7-0/coreclr-debug-rhel.7.2-x64.zip", + "url": "https://vsdebugger.azureedge.net/coreclr-debug-1-7-1/coreclr-debug-rhel.7.2-x64.zip", + "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-7-1/coreclr-debug-rhel.7.2-x64.zip", "installPath": ".debugger", "runtimeIds": [ "rhel.7-x64" @@ -261,8 +261,8 @@ }, { "description": ".NET Core Debugger (Ubuntu 14.04 / x64)", - "url": "https://vsdebugger.azureedge.net/coreclr-debug-1-7-0/coreclr-debug-ubuntu.14.04-x64.zip", - "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-7-0/coreclr-debug-ubuntu.14.04-x64.zip", + "url": 
"https://vsdebugger.azureedge.net/coreclr-debug-1-7-1/coreclr-debug-ubuntu.14.04-x64.zip", + "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-7-1/coreclr-debug-ubuntu.14.04-x64.zip", "installPath": ".debugger", "runtimeIds": [ "ubuntu.14.04-x64" @@ -274,8 +274,8 @@ }, { "description": ".NET Core Debugger (Ubuntu 16.04 / x64)", - "url": "https://vsdebugger.azureedge.net/coreclr-debug-1-7-0/coreclr-debug-ubuntu.16.04-x64.zip", - "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-7-0/coreclr-debug-ubuntu.16.04-x64.zip", + "url": "https://vsdebugger.azureedge.net/coreclr-debug-1-7-1/coreclr-debug-ubuntu.16.04-x64.zip", + "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-7-1/coreclr-debug-ubuntu.16.04-x64.zip", "installPath": ".debugger", "runtimeIds": [ "ubuntu.16.04-x64" @@ -287,8 +287,8 @@ }, { "description": ".NET Core Debugger (Ubuntu 16.10 / x64)", - "url": "https://vsdebugger.azureedge.net/coreclr-debug-1-7-0/coreclr-debug-ubuntu.16.10-x64.zip", - "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-7-0/coreclr-debug-ubuntu.16.10-x64.zip", + "url": "https://vsdebugger.azureedge.net/coreclr-debug-1-7-1/coreclr-debug-ubuntu.16.10-x64.zip", + "fallbackUrl": "https://vsdebugger.blob.core.windows.net/coreclr-debug-1-7-1/coreclr-debug-ubuntu.16.10-x64.zip", "installPath": ".debugger", "runtimeIds": [ "ubuntu.16.10-x64" From 365a2d0eb514e42601066758fb0e58d269ef99c2 Mon Sep 17 00:00:00 2001 From: Dustin Campbell Date: Tue, 7 Feb 2017 15:57:16 -0800 Subject: [PATCH 191/192] v1.7.0-beta4 -> v1.7.0-beta5 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 43374b3731..0108e73380 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "name": "csharp", "publisher": "ms-vscode", - "version": "1.7.0-beta4", + "version": "1.7.0-beta5", "description": "C# for Visual Studio Code (powered by OmniSharp).", "displayName": 
"C#", "author": "Microsoft Corporation", From add9a9600ba1812df15f28473d72ed83f78a3586 Mon Sep 17 00:00:00 2001 From: Rajkumar Janakiraman Date: Tue, 7 Feb 2017 17:15:34 -0800 Subject: [PATCH 192/192] Adding pipeArgs documentation. (#1202) * Adding pipeArgs documentation. * Ran "gulp generateOptionsSchema" to update package.json. Fixed instructions in the readme.md --- package.json | 18 +++++++++--------- src/tools/OptionsSchema.json | 4 ++-- src/tools/README.md | 2 +- 3 files changed, 12 insertions(+), 12 deletions(-) diff --git a/package.json b/package.json index 0108e73380..e9ec951725 100644 --- a/package.json +++ b/package.json @@ -714,7 +714,7 @@ }, "pipeArgs": { "type": "array", - "description": "Command line arguments passed to the pipe program.", + "description": "Command line arguments passed to the pipe program. Token ${debuggerCommand} in pipeArgs will get replaced by the full debugger command, this token can be specified inline with other arguments. If ${debuggerCommand} isn’t used in any argument, the full debugger command will be instead be added to the end of the argument list.", "items": { "type": "string" }, @@ -754,7 +754,7 @@ }, "pipeArgs": { "type": "array", - "description": "Command line arguments passed to the pipe program.", + "description": "Command line arguments passed to the pipe program. Token ${debuggerCommand} in pipeArgs will get replaced by the full debugger command, this token can be specified inline with other arguments. If ${debuggerCommand} isn’t used in any argument, the full debugger command will be instead be added to the end of the argument list.", "items": { "type": "string" }, @@ -791,7 +791,7 @@ }, "pipeArgs": { "type": "array", - "description": "Command line arguments passed to the pipe program.", + "description": "Command line arguments passed to the pipe program. Token ${debuggerCommand} in pipeArgs will get replaced by the full debugger command, this token can be specified inline with other arguments. 
If ${debuggerCommand} isn’t used in any argument, the full debugger command will be instead be added to the end of the argument list.", "items": { "type": "string" }, @@ -828,7 +828,7 @@ }, "pipeArgs": { "type": "array", - "description": "Command line arguments passed to the pipe program.", + "description": "Command line arguments passed to the pipe program. Token ${debuggerCommand} in pipeArgs will get replaced by the full debugger command, this token can be specified inline with other arguments. If ${debuggerCommand} isn’t used in any argument, the full debugger command will be instead be added to the end of the argument list.", "items": { "type": "string" }, @@ -964,7 +964,7 @@ }, "pipeArgs": { "type": "array", - "description": "Command line arguments passed to the pipe program.", + "description": "Command line arguments passed to the pipe program. Token ${debuggerCommand} in pipeArgs will get replaced by the full debugger command, this token can be specified inline with other arguments. If ${debuggerCommand} isn’t used in any argument, the full debugger command will be instead be added to the end of the argument list.", "items": { "type": "string" }, @@ -1004,7 +1004,7 @@ }, "pipeArgs": { "type": "array", - "description": "Command line arguments passed to the pipe program.", + "description": "Command line arguments passed to the pipe program. Token ${debuggerCommand} in pipeArgs will get replaced by the full debugger command, this token can be specified inline with other arguments. If ${debuggerCommand} isn’t used in any argument, the full debugger command will be instead be added to the end of the argument list.", "items": { "type": "string" }, @@ -1041,7 +1041,7 @@ }, "pipeArgs": { "type": "array", - "description": "Command line arguments passed to the pipe program.", + "description": "Command line arguments passed to the pipe program. 
Token ${debuggerCommand} in pipeArgs will get replaced by the full debugger command, this token can be specified inline with other arguments. If ${debuggerCommand} isn’t used in any argument, the full debugger command will be instead be added to the end of the argument list.", "items": { "type": "string" }, @@ -1078,7 +1078,7 @@ }, "pipeArgs": { "type": "array", - "description": "Command line arguments passed to the pipe program.", + "description": "Command line arguments passed to the pipe program. Token ${debuggerCommand} in pipeArgs will get replaced by the full debugger command, this token can be specified inline with other arguments. If ${debuggerCommand} isn’t used in any argument, the full debugger command will be instead be added to the end of the argument list.", "items": { "type": "string" }, @@ -1151,4 +1151,4 @@ } ] } -} +} \ No newline at end of file diff --git a/src/tools/OptionsSchema.json b/src/tools/OptionsSchema.json index 71b81eccf1..62f52c114a 100644 --- a/src/tools/OptionsSchema.json +++ b/src/tools/OptionsSchema.json @@ -26,7 +26,7 @@ }, "pipeArgs": { "type": "array", - "description": "Command line arguments passed to the pipe program.", + "description": "Command line arguments passed to the pipe program. Token ${debuggerCommand} in pipeArgs will get replaced by the full debugger command, this token can be specified inline with other arguments. If ${debuggerCommand} isn’t used in any argument, the full debugger command will be instead be added to the end of the argument list.", "items": { "type": "string" }, @@ -64,7 +64,7 @@ }, "pipeArgs": { "type": "array", - "description": "Command line arguments passed to the pipe program.", + "description": "Command line arguments passed to the pipe program. Token ${debuggerCommand} in pipeArgs will get replaced by the full debugger command, this token can be specified inline with other arguments. 
If ${debuggerCommand} isn’t used in any argument, the full debugger command will be instead be added to the end of the argument list.", "items": { "type": "string" }, diff --git a/src/tools/README.md b/src/tools/README.md index 7f2585c39d..bab72413f2 100644 --- a/src/tools/README.md +++ b/src/tools/README.md @@ -2,7 +2,7 @@ OptionsSchema.json defines the type for Launch/Attach options. # GenerateOptionsSchema -If there are any modifications to the OptionsSchema.json file. Please run `gulp updateOptionsSchema` at the repo root. +If there are any modifications to the OptionsSchema.json file. Please run `gulp generateOptionsSchema` at the repo root. This will call GenerateOptionsSchema and update the package.json file. **NOTE:** *Any manual changes to package.json's object.contributes.debuggers[0].configurationAttributes will be