From 93de8069ec2f424519aa4ddcdad4427961da2efe Mon Sep 17 00:00:00 2001
From: Wliu
Date: Sat, 1 Aug 2015 17:33:52 -0400
Subject: [PATCH] :art: Clean up specs file

---
 spec/javascript-spec.coffee | 136 ++++++++++++++++--------------------
 1 file changed, 61 insertions(+), 75 deletions(-)

diff --git a/spec/javascript-spec.coffee b/spec/javascript-spec.coffee
index 1be32abd..eaa22bb4 100644
--- a/spec/javascript-spec.coffee
+++ b/spec/javascript-spec.coffee
@@ -36,7 +36,6 @@ describe "Javascript grammar", ->
       {tokens} = grammar.tokenizeLine('with')
       expect(tokens[0]).toEqual value: 'with', scopes: ['source.js', 'keyword.control.js']
 
-
   describe "built-in globals", ->
     it "tokenizes them as support classes", ->
       {tokens} = grammar.tokenizeLine('window')
@@ -130,7 +129,6 @@ describe "Javascript grammar", ->
         1
         / 2
       """
-
       expect(lines[0][0]).toEqual value: '1', scopes: ['source.js', 'constant.numeric.js']
       expect(lines[1][0]).toEqual value: '/ ', scopes: ['source.js']
       expect(lines[1][1]).toEqual value: '2', scopes: ['source.js', 'constant.numeric.js']
@@ -326,12 +324,12 @@ describe "Javascript grammar", ->
       expect(tokens[4]).toEqual value: 'SomeClass', scopes: ['source.js', 'meta.class.js', 'entity.name.type.js']
 
   describe "ES6 import", ->
-    it "Tokenizes import ... as", ->
+    it "tokenizes import ... as", ->
       {tokens} = grammar.tokenizeLine('import \'react\' as React')
       expect(tokens[0]).toEqual value: 'import', scopes: ['source.js', 'meta.import.js', 'keyword.control.js']
       expect(tokens[6]).toEqual value: 'as', scopes: ['source.js', 'meta.import.js', 'keyword.control.js']
 
-    it "Tokenizes import ... from", ->
+    it "tokenizes import ... from", ->
       {tokens} = grammar.tokenizeLine('import React from \'react\'')
       expect(tokens[0]).toEqual value: 'import', scopes: ['source.js', 'meta.import.js', 'keyword.control.js']
       expect(tokens[4]).toEqual value: 'from', scopes: ['source.js', 'meta.import.js', 'keyword.control.js']
@@ -340,18 +338,17 @@ describe "Javascript grammar", ->
       expect(tokens[6]).toEqual value: 'from', scopes: ['source.js', 'meta.import.js', 'keyword.control.js']
 
   describe "ES6 yield", ->
-    it "Tokenizes yield", ->
+    it "tokenizes yield", ->
       {tokens} = grammar.tokenizeLine('yield next')
       expect(tokens[0]).toEqual value: 'yield', scopes: ['source.js', 'meta.control.yield.js', 'keyword.control.js']
 
-    it "Tokenizes yield*", ->
+    it "tokenizes yield*", ->
       {tokens} = grammar.tokenizeLine('yield * next')
       expect(tokens[0]).toEqual value: 'yield', scopes: ['source.js', 'meta.control.yield.js', 'keyword.control.js']
       expect(tokens[2]).toEqual value: '*', scopes: ['source.js', 'meta.control.yield.js', 'storage.modifier.js']
 
   it "doesn't confuse strings and functions", ->
     {tokens} = grammar.tokenizeLine("'a'.b(':c(d)')")
-
     expect(tokens[0]).toEqual value: "'", scopes: ['source.js', 'string.quoted.single.js', 'punctuation.definition.string.begin.js']
     expect(tokens[1]).toEqual value: "a", scopes: ['source.js', 'string.quoted.single.js']
     expect(tokens[2]).toEqual value: "'", scopes: ['source.js', 'string.quoted.single.js', 'punctuation.definition.string.end.js']
@@ -377,27 +374,9 @@ describe "Javascript grammar", ->
       {tokens} = grammar.tokenizeLine('default: ')
       expect(tokens[0]).toEqual value: 'default', scopes: ['source.js', 'keyword.control.js']
 
-  it "tokenizes arrow functions with params", ->
-    {tokens} = grammar.tokenizeLine('(param1,param2)=>{}')
-    expect(tokens[0]).toEqual value: '(', scopes: ['source.js', 'meta.function.arrow.js', 'punctuation.definition.parameters.begin.js']
-    expect(tokens[1]).toEqual value: 'param1', scopes: ['source.js', 'meta.function.arrow.js', 'variable.parameter.function.js']
-    expect(tokens[3]).toEqual value: 'param2', scopes: ['source.js', 'meta.function.arrow.js', 'variable.parameter.function.js']
-    expect(tokens[4]).toEqual value: ')', scopes: ['source.js', 'meta.function.arrow.js', 'punctuation.definition.parameters.end.js']
-    expect(tokens[5]).toEqual value: '=>', scopes: ['source.js', 'meta.function.arrow.js', 'storage.type.arrow.js']
-
-  it "tokenizes comments in function params", ->
-    {tokens} = grammar.tokenizeLine('foo: function (/**Bar*/bar){')
-
-    expect(tokens[5]).toEqual value: '(', scopes: ['source.js', 'meta.function.json.js', 'punctuation.definition.parameters.begin.js']
-    expect(tokens[6]).toEqual value: '/**', scopes: ['source.js', 'meta.function.json.js', 'comment.block.documentation.js', 'punctuation.definition.comment.js']
-    expect(tokens[7]).toEqual value: 'Bar', scopes: ['source.js', 'meta.function.json.js', 'comment.block.documentation.js']
-    expect(tokens[8]).toEqual value: '*/', scopes: ['source.js', 'meta.function.json.js', 'comment.block.documentation.js', 'punctuation.definition.comment.js']
-    expect(tokens[9]).toEqual value: 'bar', scopes: ['source.js', 'meta.function.json.js', 'variable.parameter.function.js']
-
   describe "non-anonymous functions", ->
     it "tokenizes methods", ->
       {tokens} = grammar.tokenizeLine('Foo.method = function nonAnonymous(')
-
       expect(tokens[0]).toEqual value: 'Foo', scopes: ['source.js', 'meta.function.js', 'support.class.js']
       expect(tokens[2]).toEqual value: 'method', scopes: ['source.js', 'meta.function.js', 'entity.name.function.js']
       expect(tokens[4]).toEqual value: '=', scopes: ['source.js', 'meta.function.js', 'keyword.operator.js']
@@ -415,7 +394,6 @@ describe "Javascript grammar", ->
 
     it "tokenizes functions", ->
       {tokens} = grammar.tokenizeLine('var func = function nonAnonymous(')
-
       expect(tokens[0]).toEqual value: 'var', scopes: ['source.js', 'storage.modifier.js']
       expect(tokens[2]).toEqual value: 'func', scopes: ['source.js', 'meta.function.js', 'entity.name.function.js']
       expect(tokens[4]).toEqual value: '=', scopes: ['source.js', 'meta.function.js', 'keyword.operator.js']
@@ -425,7 +403,6 @@ describe "Javascript grammar", ->
 
     it "tokenizes object functions", ->
       {tokens} = grammar.tokenizeLine('foo: function nonAnonymous(')
-
       expect(tokens[0]).toEqual value: 'foo', scopes: ['source.js', 'meta.function.json.js', 'entity.name.function.js']
       expect(tokens[1]).toEqual value: ':', scopes: ['source.js', 'meta.function.json.js', 'keyword.operator.js']
       expect(tokens[3]).toEqual value: 'function', scopes: ['source.js', 'meta.function.json.js', 'storage.type.function.js']
@@ -434,7 +411,6 @@ describe "Javascript grammar", ->
 
     it "tokenizes quoted object functions", ->
      {tokens} = grammar.tokenizeLine('"foo": function nonAnonymous(')
-
       expect(tokens[1]).toEqual value: 'foo', scopes: ['source.js', 'meta.function.json.js', 'string.quoted.double.js', 'entity.name.function.js']
       expect(tokens[3]).toEqual value: ':', scopes: ['source.js', 'meta.function.json.js', 'keyword.operator.js']
       expect(tokens[5]).toEqual value: 'function', scopes: ['source.js', 'meta.function.json.js', 'storage.type.function.js']
@@ -447,53 +423,63 @@ describe "Javascript grammar", ->
       expect(tokens[2]).toEqual value: 'function', scopes: ['source.js', 'meta.function.js', 'storage.type.function.js']
       expect(tokens[4]).toEqual value: 'f', scopes: ['source.js', 'meta.function.js', 'entity.name.function.js']
 
-  it "tokenizes /* */ comments", ->
-    {tokens} = grammar.tokenizeLine('/**/')
-
-    expect(tokens[0]).toEqual value: '/*', scopes: ['source.js', 'comment.block.js', 'punctuation.definition.comment.js']
-    expect(tokens[1]).toEqual value: '*/', scopes: ['source.js', 'comment.block.js', 'punctuation.definition.comment.js']
-
-    {tokens} = grammar.tokenizeLine('/* foo */')
-
-    expect(tokens[0]).toEqual value: '/*', scopes: ['source.js', 'comment.block.js', 'punctuation.definition.comment.js']
-    expect(tokens[1]).toEqual value: ' foo ', scopes: ['source.js', 'comment.block.js']
-    expect(tokens[2]).toEqual value: '*/', scopes: ['source.js', 'comment.block.js', 'punctuation.definition.comment.js']
-
-  it "tokenizes /** */ comments", ->
-    {tokens} = grammar.tokenizeLine('/***/')
-
-    expect(tokens[0]).toEqual value: '/**', scopes: ['source.js', 'comment.block.documentation.js', 'punctuation.definition.comment.js']
-    expect(tokens[1]).toEqual value: '*/', scopes: ['source.js', 'comment.block.documentation.js', 'punctuation.definition.comment.js']
-
-    {tokens} = grammar.tokenizeLine('/** foo */')
-
-    expect(tokens[0]).toEqual value: '/**', scopes: ['source.js', 'comment.block.documentation.js', 'punctuation.definition.comment.js']
-    expect(tokens[1]).toEqual value: ' foo ', scopes: ['source.js', 'comment.block.documentation.js']
-    expect(tokens[2]).toEqual value: '*/', scopes: ['source.js', 'comment.block.documentation.js', 'punctuation.definition.comment.js']
-
-    {tokens} = grammar.tokenizeLine('/** @mixins */')
-
-    expect(tokens[0]).toEqual value: '/**', scopes: ['source.js', 'comment.block.documentation.js', 'punctuation.definition.comment.js']
-    expect(tokens[2]).toEqual value: '@mixins', scopes: ['source.js', 'comment.block.documentation.js', 'storage.type.class.jsdoc']
-    expect(tokens[3]).toEqual value: ' ', scopes: ['source.js', 'comment.block.documentation.js']
-    expect(tokens[4]).toEqual value: '*/', scopes: ['source.js', 'comment.block.documentation.js', 'punctuation.definition.comment.js']
-
-  it "tokenizes comments inside function parameters correctly", ->
-    {tokens} = grammar.tokenizeLine('function test(arg1 /*, arg2 */) {}')
-
-    expect(tokens[0]).toEqual value: 'function', scopes: ['source.js', 'meta.function.js', 'storage.type.function.js']
-    expect(tokens[1]).toEqual value: ' ', scopes: ['source.js', 'meta.function.js']
-    expect(tokens[2]).toEqual value: 'test', scopes: ['source.js', 'meta.function.js', 'entity.name.function.js']
-    expect(tokens[3]).toEqual value: '(', scopes: ['source.js', 'meta.function.js', 'punctuation.definition.parameters.begin.js']
-    expect(tokens[4]).toEqual value: 'arg1', scopes: ['source.js', 'meta.function.js', 'variable.parameter.function.js']
-    expect(tokens[5]).toEqual value: ' ', scopes: ['source.js', 'meta.function.js']
-    expect(tokens[6]).toEqual value: '/*', scopes: ['source.js', 'meta.function.js', 'comment.block.js', 'punctuation.definition.comment.js']
-    expect(tokens[7]).toEqual value: ', arg2 ', scopes: ['source.js', 'meta.function.js', 'comment.block.js']
-    expect(tokens[8]).toEqual value: '*/', scopes: ['source.js', 'meta.function.js', 'comment.block.js', 'punctuation.definition.comment.js']
-    expect(tokens[9]).toEqual value: ')', scopes: ['source.js', 'meta.function.js', 'punctuation.definition.parameters.end.js']
-    expect(tokens[10]).toEqual value: ' ', scopes: ['source.js']
-    expect(tokens[11]).toEqual value: '{', scopes: ['source.js', 'punctuation.section.scope.begin.js']
-    expect(tokens[12]).toEqual value: '}', scopes: ['source.js', 'punctuation.section.scope.end.js']
+  it "tokenizes arrow functions with params", ->
+    {tokens} = grammar.tokenizeLine('(param1,param2)=>{}')
+    expect(tokens[0]).toEqual value: '(', scopes: ['source.js', 'meta.function.arrow.js', 'punctuation.definition.parameters.begin.js']
+    expect(tokens[1]).toEqual value: 'param1', scopes: ['source.js', 'meta.function.arrow.js', 'variable.parameter.function.js']
+    expect(tokens[3]).toEqual value: 'param2', scopes: ['source.js', 'meta.function.arrow.js', 'variable.parameter.function.js']
+    expect(tokens[4]).toEqual value: ')', scopes: ['source.js', 'meta.function.arrow.js', 'punctuation.definition.parameters.end.js']
+    expect(tokens[5]).toEqual value: '=>', scopes: ['source.js', 'meta.function.arrow.js', 'storage.type.arrow.js']
+
+  describe "comments", ->
+    it "tokenizes /* */ comments", ->
+      {tokens} = grammar.tokenizeLine('/**/')
+      expect(tokens[0]).toEqual value: '/*', scopes: ['source.js', 'comment.block.js', 'punctuation.definition.comment.js']
+      expect(tokens[1]).toEqual value: '*/', scopes: ['source.js', 'comment.block.js', 'punctuation.definition.comment.js']
+
+      {tokens} = grammar.tokenizeLine('/* foo */')
+      expect(tokens[0]).toEqual value: '/*', scopes: ['source.js', 'comment.block.js', 'punctuation.definition.comment.js']
+      expect(tokens[1]).toEqual value: ' foo ', scopes: ['source.js', 'comment.block.js']
+      expect(tokens[2]).toEqual value: '*/', scopes: ['source.js', 'comment.block.js', 'punctuation.definition.comment.js']
+
+    it "tokenizes /** */ comments", ->
+      {tokens} = grammar.tokenizeLine('/***/')
+      expect(tokens[0]).toEqual value: '/**', scopes: ['source.js', 'comment.block.documentation.js', 'punctuation.definition.comment.js']
+      expect(tokens[1]).toEqual value: '*/', scopes: ['source.js', 'comment.block.documentation.js', 'punctuation.definition.comment.js']
+
+      {tokens} = grammar.tokenizeLine('/** foo */')
+      expect(tokens[0]).toEqual value: '/**', scopes: ['source.js', 'comment.block.documentation.js', 'punctuation.definition.comment.js']
+      expect(tokens[1]).toEqual value: ' foo ', scopes: ['source.js', 'comment.block.documentation.js']
+      expect(tokens[2]).toEqual value: '*/', scopes: ['source.js', 'comment.block.documentation.js', 'punctuation.definition.comment.js']
+
+      {tokens} = grammar.tokenizeLine('/** @mixins */')
+      expect(tokens[0]).toEqual value: '/**', scopes: ['source.js', 'comment.block.documentation.js', 'punctuation.definition.comment.js']
+      expect(tokens[2]).toEqual value: '@mixins', scopes: ['source.js', 'comment.block.documentation.js', 'storage.type.class.jsdoc']
+      expect(tokens[3]).toEqual value: ' ', scopes: ['source.js', 'comment.block.documentation.js']
+      expect(tokens[4]).toEqual value: '*/', scopes: ['source.js', 'comment.block.documentation.js', 'punctuation.definition.comment.js']
+
+    it "tokenizes comments inside function parameters correctly", ->
+      {tokens} = grammar.tokenizeLine('function test(arg1 /*, arg2 */) {}')
+      expect(tokens[0]).toEqual value: 'function', scopes: ['source.js', 'meta.function.js', 'storage.type.function.js']
+      expect(tokens[1]).toEqual value: ' ', scopes: ['source.js', 'meta.function.js']
+      expect(tokens[2]).toEqual value: 'test', scopes: ['source.js', 'meta.function.js', 'entity.name.function.js']
+      expect(tokens[3]).toEqual value: '(', scopes: ['source.js', 'meta.function.js', 'punctuation.definition.parameters.begin.js']
+      expect(tokens[4]).toEqual value: 'arg1', scopes: ['source.js', 'meta.function.js', 'variable.parameter.function.js']
+      expect(tokens[5]).toEqual value: ' ', scopes: ['source.js', 'meta.function.js']
+      expect(tokens[6]).toEqual value: '/*', scopes: ['source.js', 'meta.function.js', 'comment.block.js', 'punctuation.definition.comment.js']
+      expect(tokens[7]).toEqual value: ', arg2 ', scopes: ['source.js', 'meta.function.js', 'comment.block.js']
+      expect(tokens[8]).toEqual value: '*/', scopes: ['source.js', 'meta.function.js', 'comment.block.js', 'punctuation.definition.comment.js']
+      expect(tokens[9]).toEqual value: ')', scopes: ['source.js', 'meta.function.js', 'punctuation.definition.parameters.end.js']
+      expect(tokens[10]).toEqual value: ' ', scopes: ['source.js']
+      expect(tokens[11]).toEqual value: '{', scopes: ['source.js', 'punctuation.section.scope.begin.js']
+      expect(tokens[12]).toEqual value: '}', scopes: ['source.js', 'punctuation.section.scope.end.js']
+
+      {tokens} = grammar.tokenizeLine('foo: function (/**Bar*/bar){')
+      expect(tokens[5]).toEqual value: '(', scopes: ['source.js', 'meta.function.json.js', 'punctuation.definition.parameters.begin.js']
+      expect(tokens[6]).toEqual value: '/**', scopes: ['source.js', 'meta.function.json.js', 'comment.block.documentation.js', 'punctuation.definition.comment.js']
+      expect(tokens[7]).toEqual value: 'Bar', scopes: ['source.js', 'meta.function.json.js', 'comment.block.documentation.js']
+      expect(tokens[8]).toEqual value: '*/', scopes: ['source.js', 'meta.function.json.js', 'comment.block.documentation.js', 'punctuation.definition.comment.js']
+      expect(tokens[9]).toEqual value: 'bar', scopes: ['source.js', 'meta.function.json.js', 'variable.parameter.function.js']
 
   describe "indentation", ->
     editor = null