1"use strict";
2/**
3 * @license
4 * Copyright (c) 2016 The Polymer Project Authors. All rights reserved.
5 * This code may only be used under the BSD style license found at
6 * http://polymer.github.io/LICENSE.txt The complete set of authors may be found
7 * at http://polymer.github.io/AUTHORS.txt The complete set of contributors may
8 * be found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by
9 * Google as part of the polymer project is also subject to an additional IP
10 * rights grant found at http://polymer.github.io/PATENTS.txt
11 */
12Object.defineProperty(exports, "__esModule", { value: true });
13const chai_1 = require("chai");
14const token_1 = require("../shady-css/token");
15const tokenizer_1 = require("../shady-css/tokenizer");
16const fixtures = require("./fixtures");
17const helpers = require("./helpers");
18describe('Tokenizer', () => {
19 describe('when tokenizing basic structures', () => {
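        // Note: token ranges are offsets into the source string and appear to
        // be start-inclusive and end-exclusive: '"foo"' (5 characters) yields
        // a token spanning [0, 5).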
        it('can identify strings', () => {
            chai_1.expect(new tokenizer_1.Tokenizer('"foo"').flush()).to.be.eql(helpers.linkedTokens([
                new token_1.Token(token_1.Token.type.string, 0, 5)
            ]));
        });
        it('can identify comments', () => {
            chai_1.expect(new tokenizer_1.Tokenizer('/*foo*/').flush()).to.be.eql(helpers.linkedTokens([
                new token_1.Token(token_1.Token.type.comment, 0, 7)
            ]));
        });
        it('can identify words', () => {
            chai_1.expect(new tokenizer_1.Tokenizer('font-family').flush())
                .to.be.eql(helpers.linkedTokens([new token_1.Token(token_1.Token.type.word, 0, 11)]));
        });
        it('can identify boundaries', () => {
            chai_1.expect(new tokenizer_1.Tokenizer('@{};()').flush()).to.be.eql(helpers.linkedTokens([
                new token_1.Token(token_1.Token.type.at, 0, 1),
                new token_1.Token(token_1.Token.type.openBrace, 1, 2),
                new token_1.Token(token_1.Token.type.closeBrace, 2, 3),
                new token_1.Token(token_1.Token.type.semicolon, 3, 4),
                new token_1.Token(token_1.Token.type.openParenthesis, 4, 5),
                new token_1.Token(token_1.Token.type.closeParenthesis, 5, 6)
            ]));
        });
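        // Added illustrative sketch (not part of the original suite): assuming
        // the same tokenization rules and offset conventions exercised above,
        // a simple declaration mixes word, colon, and whitespace tokens.
        it('can identify a mix of words and boundaries', () => {
            chai_1.expect(new tokenizer_1.Tokenizer('color: red').flush()).to.be.eql(helpers.linkedTokens([
                new token_1.Token(token_1.Token.type.word, 0, 5),
                new token_1.Token(token_1.Token.type.colon, 5, 6),
                new token_1.Token(token_1.Token.type.whitespace, 6, 7),
                new token_1.Token(token_1.Token.type.word, 7, 10)
            ]));
        });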
    });
    describe('when tokenizing standard CSS structures', () => {
        it('can tokenize a basic ruleset', () => {
            helpers.expectTokenSequence(new tokenizer_1.Tokenizer(fixtures.basicRuleset), [
                token_1.Token.type.whitespace, '\n', token_1.Token.type.word, 'body',
                token_1.Token.type.whitespace, ' ', token_1.Token.type.openBrace, '{',
                token_1.Token.type.whitespace, '\n ', token_1.Token.type.word, 'margin',
                token_1.Token.type.colon, ':', token_1.Token.type.whitespace, ' ',
                token_1.Token.type.word, '0', token_1.Token.type.semicolon, ';',
                token_1.Token.type.whitespace, '\n ', token_1.Token.type.word, 'padding',
                token_1.Token.type.colon, ':', token_1.Token.type.whitespace, ' ',
                token_1.Token.type.word, '0px', token_1.Token.type.whitespace, '\n',
                token_1.Token.type.closeBrace, '}', token_1.Token.type.whitespace, '\n'
            ]);
        });
        it('can tokenize @rules', () => {
            helpers.expectTokenSequence(new tokenizer_1.Tokenizer(fixtures.atRules), [
                token_1.Token.type.whitespace, '\n', token_1.Token.type.at, '@',
                token_1.Token.type.word, 'import', token_1.Token.type.whitespace, ' ',
                token_1.Token.type.word, 'url', token_1.Token.type.openParenthesis, '(',
                token_1.Token.type.string, '\'foo.css\'', token_1.Token.type.closeParenthesis, ')',
                token_1.Token.type.semicolon, ';', token_1.Token.type.whitespace, '\n\n',
                token_1.Token.type.at, '@', token_1.Token.type.word, 'font-face',
                token_1.Token.type.whitespace, ' ', token_1.Token.type.openBrace, '{',
                token_1.Token.type.whitespace, '\n ', token_1.Token.type.word, 'font-family',
                token_1.Token.type.colon, ':', token_1.Token.type.whitespace, ' ',
                token_1.Token.type.word, 'foo', token_1.Token.type.semicolon, ';',
                token_1.Token.type.whitespace, '\n', token_1.Token.type.closeBrace, '}',
                token_1.Token.type.whitespace, '\n\n', token_1.Token.type.at, '@',
                token_1.Token.type.word, 'charset', token_1.Token.type.whitespace, ' ',
                token_1.Token.type.string, '\'foo\'', token_1.Token.type.semicolon, ';',
                token_1.Token.type.whitespace, '\n'
            ]);
        });
        it('navigates pathological boundary usage', () => {
            helpers.expectTokenSequence(new tokenizer_1.Tokenizer(fixtures.extraSemicolons), [
                token_1.Token.type.whitespace, '\n', token_1.Token.type.colon, ':',
                token_1.Token.type.word, 'host', token_1.Token.type.whitespace, ' ',
                token_1.Token.type.openBrace, '{', token_1.Token.type.whitespace, '\n ',
                token_1.Token.type.word, 'margin', token_1.Token.type.colon, ':',
                token_1.Token.type.whitespace, ' ', token_1.Token.type.word, '0',
                token_1.Token.type.semicolon, ';', token_1.Token.type.semicolon, ';',
                token_1.Token.type.semicolon, ';', token_1.Token.type.whitespace, '\n ',
                token_1.Token.type.word, 'padding', token_1.Token.type.colon, ':',
                token_1.Token.type.whitespace, ' ', token_1.Token.type.word, '0',
                token_1.Token.type.semicolon, ';', token_1.Token.type.semicolon, ';',
                token_1.Token.type.whitespace, '\n ', token_1.Token.type.semicolon, ';',
                token_1.Token.type.word, 'display', token_1.Token.type.colon, ':',
                token_1.Token.type.whitespace, ' ', token_1.Token.type.word, 'block',
                token_1.Token.type.semicolon, ';', token_1.Token.type.whitespace, '\n',
                token_1.Token.type.closeBrace, '}', token_1.Token.type.semicolon, ';',
                token_1.Token.type.whitespace, '\n'
            ]);
        });
    });
    describe('when extracting substrings', () => {
        it('can slice the string using tokens', () => {
            const tokenizer = new tokenizer_1.Tokenizer('foo bar');
            const substring = tokenizer.slice(new token_1.Token(token_1.Token.type.word, 2, 3), new token_1.Token(token_1.Token.type.word, 5, 6));
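            // 'foo bar' indices 2 through 5 are 'o ba': slice appears to span
            // from the first token's start offset (2) up to, but not including,
            // the second token's end offset (6).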
            chai_1.expect(substring).to.be.eql('o ba');
        });
    });
});
//# sourceMappingURL=tokenizer-test.js.map