This repository was archived by the owner on Dec 15, 2022. It is now read-only.

Clean up specs file #185

Merged
merged 1 commit on Aug 2, 2015
136 changes: 61 additions & 75 deletions spec/javascript-spec.coffee
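For context, every assertion in this spec drives the grammar through Atom's tokenizer. A minimal sketch of the pattern these specs follow, assuming Atom's Jasmine spec environment and the language-javascript package (the keyword and scope names below are illustrative, not taken from this diff):

describe "Javascript grammar", ->
  grammar = null

  beforeEach ->
    # Activate the grammar package before each spec (standard Atom spec setup).
    waitsForPromise ->
      atom.packages.activatePackage("language-javascript")

    runs ->
      grammar = atom.grammars.grammarForScopeName("source.js")

  it "tokenizes a control keyword", ->
    # tokenizeLine returns the tokens for a single line; each token carries
    # its text and the stack of TextMate scopes applied to it.
    {tokens} = grammar.tokenizeLine("return")
    expect(tokens[0]).toEqual value: "return", scopes: ['source.js', 'keyword.control.js']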
@@ -36,7 +36,6 @@ describe "Javascript grammar", ->
{tokens} = grammar.tokenizeLine('with')
expect(tokens[0]).toEqual value: 'with', scopes: ['source.js', 'keyword.control.js']


describe "built-in globals", ->
it "tokenizes them as support classes", ->
{tokens} = grammar.tokenizeLine('window')
@@ -130,7 +129,6 @@ describe "Javascript grammar", ->
1
/ 2
"""

expect(lines[0][0]).toEqual value: '1', scopes: ['source.js', 'constant.numeric.js']
expect(lines[1][0]).toEqual value: '/ ', scopes: ['source.js']
expect(lines[1][1]).toEqual value: '2', scopes: ['source.js', 'constant.numeric.js']
@@ -326,12 +324,12 @@ describe "Javascript grammar", ->
expect(tokens[4]).toEqual value: 'SomeClass', scopes: ['source.js', 'meta.class.js', 'entity.name.type.js']

describe "ES6 import", ->
it "Tokenizes import ... as", ->
it "tokenizes import ... as", ->
{tokens} = grammar.tokenizeLine('import \'react\' as React')
expect(tokens[0]).toEqual value: 'import', scopes: ['source.js', 'meta.import.js', 'keyword.control.js']
expect(tokens[6]).toEqual value: 'as', scopes: ['source.js', 'meta.import.js', 'keyword.control.js']

it "Tokenizes import ... from", ->
it "tokenizes import ... from", ->
{tokens} = grammar.tokenizeLine('import React from \'react\'')
expect(tokens[0]).toEqual value: 'import', scopes: ['source.js', 'meta.import.js', 'keyword.control.js']
expect(tokens[4]).toEqual value: 'from', scopes: ['source.js', 'meta.import.js', 'keyword.control.js']
@@ -340,18 +338,17 @@ describe "Javascript grammar", ->
expect(tokens[6]).toEqual value: 'from', scopes: ['source.js', 'meta.import.js', 'keyword.control.js']

describe "ES6 yield", ->
it "Tokenizes yield", ->
it "tokenizes yield", ->
{tokens} = grammar.tokenizeLine('yield next')
expect(tokens[0]).toEqual value: 'yield', scopes: ['source.js', 'meta.control.yield.js', 'keyword.control.js']

it "Tokenizes yield*", ->
it "tokenizes yield*", ->
{tokens} = grammar.tokenizeLine('yield * next')
expect(tokens[0]).toEqual value: 'yield', scopes: ['source.js', 'meta.control.yield.js', 'keyword.control.js']
expect(tokens[2]).toEqual value: '*', scopes: ['source.js', 'meta.control.yield.js', 'storage.modifier.js']

it "doesn't confuse strings and functions", ->
{tokens} = grammar.tokenizeLine("'a'.b(':c(d)')")

expect(tokens[0]).toEqual value: "'", scopes: ['source.js', 'string.quoted.single.js', 'punctuation.definition.string.begin.js']
expect(tokens[1]).toEqual value: "a", scopes: ['source.js', 'string.quoted.single.js']
expect(tokens[2]).toEqual value: "'", scopes: ['source.js', 'string.quoted.single.js', 'punctuation.definition.string.end.js']
@@ -377,27 +374,9 @@ describe "Javascript grammar", ->
{tokens} = grammar.tokenizeLine('default: ')
expect(tokens[0]).toEqual value: 'default', scopes: ['source.js', 'keyword.control.js']

it "tokenizes arrow functions with params", ->
{tokens} = grammar.tokenizeLine('(param1,param2)=>{}')
expect(tokens[0]).toEqual value: '(', scopes: ['source.js', 'meta.function.arrow.js', 'punctuation.definition.parameters.begin.js']
expect(tokens[1]).toEqual value: 'param1', scopes: ['source.js', 'meta.function.arrow.js', 'variable.parameter.function.js']
expect(tokens[3]).toEqual value: 'param2', scopes: ['source.js', 'meta.function.arrow.js', 'variable.parameter.function.js']
expect(tokens[4]).toEqual value: ')', scopes: ['source.js', 'meta.function.arrow.js', 'punctuation.definition.parameters.end.js']
expect(tokens[5]).toEqual value: '=>', scopes: ['source.js', 'meta.function.arrow.js', 'storage.type.arrow.js']

it "tokenizes comments in function params", ->
{tokens} = grammar.tokenizeLine('foo: function (/**Bar*/bar){')

expect(tokens[5]).toEqual value: '(', scopes: ['source.js', 'meta.function.json.js', 'punctuation.definition.parameters.begin.js']
expect(tokens[6]).toEqual value: '/**', scopes: ['source.js', 'meta.function.json.js', 'comment.block.documentation.js', 'punctuation.definition.comment.js']
expect(tokens[7]).toEqual value: 'Bar', scopes: ['source.js', 'meta.function.json.js', 'comment.block.documentation.js']
expect(tokens[8]).toEqual value: '*/', scopes: ['source.js', 'meta.function.json.js', 'comment.block.documentation.js', 'punctuation.definition.comment.js']
expect(tokens[9]).toEqual value: 'bar', scopes: ['source.js', 'meta.function.json.js', 'variable.parameter.function.js']

describe "non-anonymous functions", ->
it "tokenizes methods", ->
{tokens} = grammar.tokenizeLine('Foo.method = function nonAnonymous(')

expect(tokens[0]).toEqual value: 'Foo', scopes: ['source.js', 'meta.function.js', 'support.class.js']
expect(tokens[2]).toEqual value: 'method', scopes: ['source.js', 'meta.function.js', 'entity.name.function.js']
expect(tokens[4]).toEqual value: '=', scopes: ['source.js', 'meta.function.js', 'keyword.operator.js']
@@ -415,7 +394,6 @@ describe "Javascript grammar", ->

it "tokenizes functions", ->
{tokens} = grammar.tokenizeLine('var func = function nonAnonymous(')

expect(tokens[0]).toEqual value: 'var', scopes: ['source.js', 'storage.modifier.js']
expect(tokens[2]).toEqual value: 'func', scopes: ['source.js', 'meta.function.js', 'entity.name.function.js']
expect(tokens[4]).toEqual value: '=', scopes: ['source.js', 'meta.function.js', 'keyword.operator.js']
@@ -425,7 +403,6 @@ describe "Javascript grammar", ->

it "tokenizes object functions", ->
{tokens} = grammar.tokenizeLine('foo: function nonAnonymous(')

expect(tokens[0]).toEqual value: 'foo', scopes: ['source.js', 'meta.function.json.js', 'entity.name.function.js']
expect(tokens[1]).toEqual value: ':', scopes: ['source.js', 'meta.function.json.js', 'keyword.operator.js']
expect(tokens[3]).toEqual value: 'function', scopes: ['source.js', 'meta.function.json.js', 'storage.type.function.js']
@@ -434,7 +411,6 @@ describe "Javascript grammar", ->

it "tokenizes quoted object functions", ->
{tokens} = grammar.tokenizeLine('"foo": function nonAnonymous(')

expect(tokens[1]).toEqual value: 'foo', scopes: ['source.js', 'meta.function.json.js', 'string.quoted.double.js', 'entity.name.function.js']
expect(tokens[3]).toEqual value: ':', scopes: ['source.js', 'meta.function.json.js', 'keyword.operator.js']
expect(tokens[5]).toEqual value: 'function', scopes: ['source.js', 'meta.function.json.js', 'storage.type.function.js']
@@ -447,53 +423,63 @@ describe "Javascript grammar", ->
expect(tokens[2]).toEqual value: 'function', scopes: ['source.js', 'meta.function.js', 'storage.type.function.js']
expect(tokens[4]).toEqual value: 'f', scopes: ['source.js', 'meta.function.js', 'entity.name.function.js']

it "tokenizes /* */ comments", ->
{tokens} = grammar.tokenizeLine('/**/')

expect(tokens[0]).toEqual value: '/*', scopes: ['source.js', 'comment.block.js', 'punctuation.definition.comment.js']
expect(tokens[1]).toEqual value: '*/', scopes: ['source.js', 'comment.block.js', 'punctuation.definition.comment.js']

{tokens} = grammar.tokenizeLine('/* foo */')

expect(tokens[0]).toEqual value: '/*', scopes: ['source.js', 'comment.block.js', 'punctuation.definition.comment.js']
expect(tokens[1]).toEqual value: ' foo ', scopes: ['source.js', 'comment.block.js']
expect(tokens[2]).toEqual value: '*/', scopes: ['source.js', 'comment.block.js', 'punctuation.definition.comment.js']

it "tokenizes /** */ comments", ->
{tokens} = grammar.tokenizeLine('/***/')

expect(tokens[0]).toEqual value: '/**', scopes: ['source.js', 'comment.block.documentation.js', 'punctuation.definition.comment.js']
expect(tokens[1]).toEqual value: '*/', scopes: ['source.js', 'comment.block.documentation.js', 'punctuation.definition.comment.js']

{tokens} = grammar.tokenizeLine('/** foo */')

expect(tokens[0]).toEqual value: '/**', scopes: ['source.js', 'comment.block.documentation.js', 'punctuation.definition.comment.js']
expect(tokens[1]).toEqual value: ' foo ', scopes: ['source.js', 'comment.block.documentation.js']
expect(tokens[2]).toEqual value: '*/', scopes: ['source.js', 'comment.block.documentation.js', 'punctuation.definition.comment.js']

{tokens} = grammar.tokenizeLine('/** @mixins */')

expect(tokens[0]).toEqual value: '/**', scopes: ['source.js', 'comment.block.documentation.js', 'punctuation.definition.comment.js']
expect(tokens[2]).toEqual value: '@mixins', scopes: ['source.js', 'comment.block.documentation.js', 'storage.type.class.jsdoc']
expect(tokens[3]).toEqual value: ' ', scopes: ['source.js', 'comment.block.documentation.js']
expect(tokens[4]).toEqual value: '*/', scopes: ['source.js', 'comment.block.documentation.js', 'punctuation.definition.comment.js']

it "tokenizes comments inside function parameters correctly", ->
{tokens} = grammar.tokenizeLine('function test(arg1 /*, arg2 */) {}')

expect(tokens[0]).toEqual value: 'function', scopes: ['source.js', 'meta.function.js', 'storage.type.function.js']
expect(tokens[1]).toEqual value: ' ', scopes: ['source.js', 'meta.function.js']
expect(tokens[2]).toEqual value: 'test', scopes: ['source.js', 'meta.function.js', 'entity.name.function.js']
expect(tokens[3]).toEqual value: '(', scopes: ['source.js', 'meta.function.js', 'punctuation.definition.parameters.begin.js']
expect(tokens[4]).toEqual value: 'arg1', scopes: ['source.js', 'meta.function.js', 'variable.parameter.function.js']
expect(tokens[5]).toEqual value: ' ', scopes: ['source.js', 'meta.function.js']
expect(tokens[6]).toEqual value: '/*', scopes: ['source.js', 'meta.function.js', 'comment.block.js', 'punctuation.definition.comment.js']
expect(tokens[7]).toEqual value: ', arg2 ', scopes: ['source.js', 'meta.function.js', 'comment.block.js']
expect(tokens[8]).toEqual value: '*/', scopes: ['source.js', 'meta.function.js', 'comment.block.js', 'punctuation.definition.comment.js']
expect(tokens[9]).toEqual value: ')', scopes: ['source.js', 'meta.function.js', 'punctuation.definition.parameters.end.js']
expect(tokens[10]).toEqual value: ' ', scopes: ['source.js']
expect(tokens[11]).toEqual value: '{', scopes: ['source.js', 'punctuation.section.scope.begin.js']
expect(tokens[12]).toEqual value: '}', scopes: ['source.js', 'punctuation.section.scope.end.js']
it "tokenizes arrow functions with params", ->
{tokens} = grammar.tokenizeLine('(param1,param2)=>{}')
expect(tokens[0]).toEqual value: '(', scopes: ['source.js', 'meta.function.arrow.js', 'punctuation.definition.parameters.begin.js']
expect(tokens[1]).toEqual value: 'param1', scopes: ['source.js', 'meta.function.arrow.js', 'variable.parameter.function.js']
expect(tokens[3]).toEqual value: 'param2', scopes: ['source.js', 'meta.function.arrow.js', 'variable.parameter.function.js']
expect(tokens[4]).toEqual value: ')', scopes: ['source.js', 'meta.function.arrow.js', 'punctuation.definition.parameters.end.js']
expect(tokens[5]).toEqual value: '=>', scopes: ['source.js', 'meta.function.arrow.js', 'storage.type.arrow.js']

describe "comments", ->
it "tokenizes /* */ comments", ->
{tokens} = grammar.tokenizeLine('/**/')
expect(tokens[0]).toEqual value: '/*', scopes: ['source.js', 'comment.block.js', 'punctuation.definition.comment.js']
expect(tokens[1]).toEqual value: '*/', scopes: ['source.js', 'comment.block.js', 'punctuation.definition.comment.js']

{tokens} = grammar.tokenizeLine('/* foo */')
expect(tokens[0]).toEqual value: '/*', scopes: ['source.js', 'comment.block.js', 'punctuation.definition.comment.js']
expect(tokens[1]).toEqual value: ' foo ', scopes: ['source.js', 'comment.block.js']
expect(tokens[2]).toEqual value: '*/', scopes: ['source.js', 'comment.block.js', 'punctuation.definition.comment.js']

it "tokenizes /** */ comments", ->
{tokens} = grammar.tokenizeLine('/***/')
expect(tokens[0]).toEqual value: '/**', scopes: ['source.js', 'comment.block.documentation.js', 'punctuation.definition.comment.js']
expect(tokens[1]).toEqual value: '*/', scopes: ['source.js', 'comment.block.documentation.js', 'punctuation.definition.comment.js']

{tokens} = grammar.tokenizeLine('/** foo */')
expect(tokens[0]).toEqual value: '/**', scopes: ['source.js', 'comment.block.documentation.js', 'punctuation.definition.comment.js']
expect(tokens[1]).toEqual value: ' foo ', scopes: ['source.js', 'comment.block.documentation.js']
expect(tokens[2]).toEqual value: '*/', scopes: ['source.js', 'comment.block.documentation.js', 'punctuation.definition.comment.js']

{tokens} = grammar.tokenizeLine('/** @mixins */')
expect(tokens[0]).toEqual value: '/**', scopes: ['source.js', 'comment.block.documentation.js', 'punctuation.definition.comment.js']
expect(tokens[2]).toEqual value: '@mixins', scopes: ['source.js', 'comment.block.documentation.js', 'storage.type.class.jsdoc']
expect(tokens[3]).toEqual value: ' ', scopes: ['source.js', 'comment.block.documentation.js']
expect(tokens[4]).toEqual value: '*/', scopes: ['source.js', 'comment.block.documentation.js', 'punctuation.definition.comment.js']

it "tokenizes comments inside function parameters correctly", ->
{tokens} = grammar.tokenizeLine('function test(arg1 /*, arg2 */) {}')
expect(tokens[0]).toEqual value: 'function', scopes: ['source.js', 'meta.function.js', 'storage.type.function.js']
expect(tokens[1]).toEqual value: ' ', scopes: ['source.js', 'meta.function.js']
expect(tokens[2]).toEqual value: 'test', scopes: ['source.js', 'meta.function.js', 'entity.name.function.js']
expect(tokens[3]).toEqual value: '(', scopes: ['source.js', 'meta.function.js', 'punctuation.definition.parameters.begin.js']
expect(tokens[4]).toEqual value: 'arg1', scopes: ['source.js', 'meta.function.js', 'variable.parameter.function.js']
expect(tokens[5]).toEqual value: ' ', scopes: ['source.js', 'meta.function.js']
expect(tokens[6]).toEqual value: '/*', scopes: ['source.js', 'meta.function.js', 'comment.block.js', 'punctuation.definition.comment.js']
expect(tokens[7]).toEqual value: ', arg2 ', scopes: ['source.js', 'meta.function.js', 'comment.block.js']
expect(tokens[8]).toEqual value: '*/', scopes: ['source.js', 'meta.function.js', 'comment.block.js', 'punctuation.definition.comment.js']
expect(tokens[9]).toEqual value: ')', scopes: ['source.js', 'meta.function.js', 'punctuation.definition.parameters.end.js']
expect(tokens[10]).toEqual value: ' ', scopes: ['source.js']
expect(tokens[11]).toEqual value: '{', scopes: ['source.js', 'punctuation.section.scope.begin.js']
expect(tokens[12]).toEqual value: '}', scopes: ['source.js', 'punctuation.section.scope.end.js']

{tokens} = grammar.tokenizeLine('foo: function (/**Bar*/bar){')
expect(tokens[5]).toEqual value: '(', scopes: ['source.js', 'meta.function.json.js', 'punctuation.definition.parameters.begin.js']
expect(tokens[6]).toEqual value: '/**', scopes: ['source.js', 'meta.function.json.js', 'comment.block.documentation.js', 'punctuation.definition.comment.js']
expect(tokens[7]).toEqual value: 'Bar', scopes: ['source.js', 'meta.function.json.js', 'comment.block.documentation.js']
expect(tokens[8]).toEqual value: '*/', scopes: ['source.js', 'meta.function.json.js', 'comment.block.documentation.js', 'punctuation.definition.comment.js']
expect(tokens[9]).toEqual value: 'bar', scopes: ['source.js', 'meta.function.json.js', 'variable.parameter.function.js']

describe "indentation", ->
editor = null