// Tests for ../lib/regex-tokeniser (source text recovered from an unpkg page view).
var RegexTokeniser = require("../lib/regex-tokeniser").RegexTokeniser;
var Token = require("../lib/Token");
var StringSource = require("../lib/StringSource");

// An empty input still yields exactly one token: the zero-width "end" token.
exports.emptyStringIsTokenisedTo = stringIsTokenisedTo("", [endToken("")]);
8
9exports.canMatchSingleToken = stringIsTokenisedTo("blah", [
10 new Token("identifier", "blah", stringSourceRange("blah", 0, 4)),
11 endToken("blah")
12]);
13
14exports.canMatchMultipleTokens = stringIsTokenisedTo("a.btn", [
15 new Token("identifier", "a", stringSourceRange("a.btn", 0, 1)),
16 new Token("dot", ".", stringSourceRange("a.btn", 1, 2)),
17 new Token("identifier", "btn", stringSourceRange("a.btn", 2, 5)),
18 endToken("a.btn")
19]);
20
21exports.unrecognisedCharactersAreTokenised = stringIsTokenisedTo("!btn", [
22 new Token("unrecognisedCharacter", "!", stringSourceRange("!btn", 0, 1)),
23 new Token("identifier", "btn", stringSourceRange("!btn", 1, 4)),
24 endToken("!btn")
25]);
26
27exports.firstMatchingRuleIsUsed = stringIsTokenisedTo(":", [
28 new Token("colon1", ":", stringSourceRange(":", 0, 1)),
29 endToken(":")
30]);
31
32exports.valuesOfZeroLengthAreIgnored = function(test) {
33 var expectedTokens = [
34 new Token("unrecognisedCharacter", "!", stringSourceRange("!btn", 0, 1)),
35 new Token("identifier", "btn", stringSourceRange("!btn", 1, 4)),
36 endToken("!btn")
37 ];
38
39 var rules = [
40 {
41 name: "identifier",
42 regex: /([a-z]*)/
43 }
44 ];
45 var tokeniser = new RegexTokeniser(rules);
46 test.deepEqual(expectedTokens, tokeniser.tokenise("!btn"));
47 test.done();
48};
49
// Builds the zero-width "end" token that the tokeniser appends after the
// last character of `input`.
function endToken(input) {
    var length = input.length;
    return new Token("end", null, stringSourceRange(input, length, length));
}
54
// Returns a nodeunit test function asserting that tokenising `input` with
// the default rule set (see tokenise below) produces exactly `expected`.
function stringIsTokenisedTo(input, expected) {
    return function(test) {
        // deepEqual's signature is (actual, expected): pass the tokeniser
        // output first so failure diffs label the two sides correctly.
        test.deepEqual(tokenise(input), expected);
        test.done();
    };
}
61
// Convenience wrapper: the [startIndex, endIndex) range of `string` as a
// StringSource range.
function stringSourceRange(string, startIndex, endIndex) {
    var source = new StringSource(string);
    return source.range(startIndex, endIndex);
}
65
// Tokenises `input` with the standard test rule set. "colon1" and "colon2"
// deliberately share the same regex so that firstMatchingRuleIsUsed can
// check that the earlier rule takes precedence.
function tokenise(input) {
    var rules = [
        {name: "identifier", regex: /([a-z]+)/},
        {name: "dot", regex: /\./},
        {name: "colon1", regex: /:/},
        {name: "colon2", regex: /:/}
    ];
    return new RegexTokeniser(rules).tokenise(input);
}
88