1 | var Tokeniser = require("./Tokeniser");
|
2 | var Token = require("../lib/Token");
|
3 | var StringSource = require("../lib/StringSource");
|
4 |
|
5 | exports.stringIsSingleIdentifier = stringIsTokenisedTo("blah", [
|
6 | new Token("identifier", "blah", stringSourceRange("blah", 0, 4)),
|
7 | new Token("end", null, stringSourceRange("blah", 4, 4))
|
8 | ]);
|
9 |
|
10 | exports.identifiersAreSeparatedByWhitespace = stringIsTokenisedTo("one two", [
|
11 | new Token("identifier", "one", stringSourceRange("one two", 0, 3)),
|
12 | new Token("identifier", "two", stringSourceRange("one two", 4, 7)),
|
13 | new Token("end", null, stringSourceRange("one two", 7, 7))
|
14 | ]);
|
15 |
|
16 | exports.canDetectKeywords = stringIsTokenisedTo("true", [
|
17 | new Token("keyword", "true", stringSourceRange("true", 0, 4)),
|
18 | new Token("end", null, stringSourceRange("true", 4, 4))
|
19 | ]);
|
20 |
|
21 | exports.emptyStringIsTokenisedToSingleEndToken = stringIsTokenisedTo("", [
|
22 | new Token("end", null, stringSourceRange("", 0, 0))
|
23 | ]);
|
24 |
|
// Builds a nodeunit test case asserting that tokenising `input` produces
// exactly the `expected` token array.
//
// Fix: nodeunit's test.deepEqual signature is (actual, expected, [message]);
// the arguments were previously swapped. The assertion result was the same
// (deep equality is symmetric) but failure messages reported actual and
// expected backwards, which is misleading when a test breaks.
function stringIsTokenisedTo(input, expected) {
    return function(test) {
        test.deepEqual(tokenise(input), expected);
        test.done();
    };
}
|
31 |
|
// Convenience wrapper: builds the source-range value for the span
// [startIndex, endIndex) of `string`, as expected inside Token instances.
function stringSourceRange(string, startIndex, endIndex) {
    var source = new StringSource(string);
    return source.range(startIndex, endIndex);
}
|
35 |
|
// Runs the tokeniser under test over `input`, with "true" configured as the
// only keyword (exercised by the canDetectKeywords case).
function tokenise(input) {
    var tokeniser = new Tokeniser({keywords: ["true"]});
    return tokeniser.tokenise(input);
}
|