1{"version":3,"file":"Tokenizer.test.js","sourceRoot":"","sources":["../../../src/parser/__tests__/Tokenizer.test.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAC7C,OAAO,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AACzC,OAAO,EAAS,SAAS,EAAE,MAAM,UAAU,CAAC;AAC5C,OAAO,EAAE,WAAW,EAAE,MAAM,eAAe,CAAC;AAU5C,SAAS,aAAa,CAAC,MAAc;IACnC,IAAM,WAAW,GAAgB,IAAI,WAAW,EAAE,CAAC;IACnD,IAAM,aAAa,GAAkB,WAAW,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC;IACrE,IAAM,MAAM,GAAY,aAAa,CAAC,MAAM,CAAC;IAE7C,IAAM,KAAK,GAAoB,EAAE,CAAC;IAElC,KAAoB,UAAM,EAAN,iBAAM,EAAN,oBAAM,EAAN,IAAM,EAAE;QAAvB,IAAM,KAAK,eAAA;QACd,KAAK,CAAC,IAAI,CAAC;YACT,WAAW,EAAE,aAAa,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,IAAI,CAAC;YACpD,IAAI,EAAE,GAAG,GAAG,WAAW,CAAC,UAAU,CAAC,KAAK,CAAC,IAAI,CAAC,QAAQ,EAAE,CAAC,GAAG,GAAG;YAC/D,IAAI,EAAE,WAAW,CAAC,cAAc,CAAC,KAAK,CAAC,IAAI,EAAE,KAAK,CAAC,KAAK,CAAC;YACzD,SAAS,EAAE,SAAS,CAAC,KAAK,CAAC,IAAI,CAAC;SACjC,CAAC,CAAC;QAEH,IAAI,KAAK,CAAC,IAAI,KAAK,SAAS,CAAC,UAAU,EAAE;YACvC,MAAM;SACP;KACF;IAED,MAAM,CAAC;QACL,MAAM,EAAE,WAAW,CAAC,UAAU,CAAC,MAAM,CAAC;QACtC,MAAM,EAAE,KAAK;KACd,CAAC,CAAC,eAAe,EAAE,CAAC;AACvB,CAAC;AAED,IAAI,CAAC,2BAA2B,EAAE;IAChC,MAAM,CAAC,SAAS,CAAC,aAAa,CAAC,SAAS,CAAC,gBAAgB,CAAC,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;IAC1E,MAAM,CAAC,SAAS,CAAC,aAAa,CAAC,SAAS,CAAC,WAAW,CAAC,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;IACrE,MAAM,CAAC,SAAS,CAAC,aAAa,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;IAE/D,MAAM,CAAC,SAAS,CAAC,aAAa,CAAC,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;IACrE,MAAM,CAAC,SAAS,CAAC,aAAa,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;IAClE,MAAM,CAAC,SAAS,CAAC,aAAa,CAAC,SAAS,CAAC,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;AACtE,CAAC,CAAC,CAAC;AAEH,IAAI,CAAC,0BAA0B,EAAE;IAC/B,aAAa,CACX;QACE,KAAK;QACL,YAAY;QACZ,WAAW;QACX,KAAK;KACN,CAAC,IAAI,CAAC,IAAI,CAAC,CACb,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,IAAI,CAAC,+BAA+B,EAAE;IACpC,aAAa,CAAC,OAAO,CAAC,CAAC;IAEvB,aAAa,CAAC,CAAC,KAAK,EAAE,IAAI,EAAE,KAAK,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;IAE/C,aAAa,CAAC,CAAC,KAAK,EAAE,GAAG,EAAE,GAAG,EAAE,KAAK,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;AACrD,CAAC,CAAC,CAAC;AAEH,IAAI,CAAC,yCAAyC,EAAE;IAC9C,aAAa,CAAC,CAAC,KAAK,EAAE,gBAAgB,EAAE,2BAA2B,EAAE,KAAK,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;AAC1F,CAAC,CAAC,CAAC;AAEH,IAAI,CAAC,yCAAyC,EAAE;IAC9C,aAAa,CAAC,CAAC,KAAK,EAAE,2CAA2C,EAAE,KAAK,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;AACxF,CAAC,CAAC,CAAC;AAEH,IAAI,CAAC,uBAAuB,EAAE;IAC5B,aAAa,CAAC,CAAC,KAAK,EAAE,qCAAqC,EAAE,KAAK,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;AAClF,CAAC,CAAC,CAAC;AAEH,IAAI,CAAC,uBAAuB,EAAE;IAC5B,aAAa,CAAC,CAAC,KAAK,EAAE,uCAAuC,EAAE,KAAK,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;AACpF,CAAC,CAAC,CAAC","sourcesContent":["import { TSDocParser } from '../TSDocParser';\r\nimport { Tokenizer } from '../Tokenizer';\r\nimport { Token, TokenKind } from '../Token';\r\nimport { TestHelpers } from './TestHelpers';\r\nimport { ParserContext } from '../ParserContext';\r\n\r\ninterface ISnapshotItem {\r\n indexOfLine: number;\r\n line: string;\r\n span: string;\r\n tokenKind: string;\r\n}\r\n\r\nfunction matchSnapshot(buffer: string): void {\r\n const tsdocParser: TSDocParser = new TSDocParser();\r\n const parserContext: ParserContext = tsdocParser.parseString(buffer);\r\n const tokens: Token[] = parserContext.tokens;\r\n\r\n const items: ISnapshotItem[] = [];\r\n\r\n for (const token of tokens) {\r\n items.push({\r\n indexOfLine: parserContext.lines.indexOf(token.line),\r\n line: '>' + TestHelpers.getEscaped(token.line.toString()) + '<',\r\n span: TestHelpers.formatLineSpan(token.line, 
token.range),\r\n tokenKind: TokenKind[token.kind]\r\n });\r\n\r\n if (token.kind === TokenKind.EndOfInput) {\r\n break;\r\n }\r\n }\r\n\r\n expect({\r\n buffer: TestHelpers.getEscaped(buffer),\r\n tokens: items\r\n }).toMatchSnapshot();\r\n}\r\n\r\ntest('Tokenizer.isPunctuation()', () => {\r\n expect(Tokenizer.isPunctuation(TokenKind.OtherPunctuation)).toEqual(true);\r\n expect(Tokenizer.isPunctuation(TokenKind.DoubleQuote)).toEqual(true);\r\n expect(Tokenizer.isPunctuation(TokenKind.Slash)).toEqual(true);\r\n\r\n expect(Tokenizer.isPunctuation(TokenKind.EndOfInput)).toEqual(false);\r\n expect(Tokenizer.isPunctuation(TokenKind.Spacing)).toEqual(false);\r\n expect(Tokenizer.isPunctuation(TokenKind.AsciiWord)).toEqual(false);\r\n});\r\n\r\ntest('00 Tokenizer simple case', () => {\r\n matchSnapshot(\r\n [\r\n '/**',\r\n ' * line 1 ', // extra space at end of line\r\n ' * line 2',\r\n ' */'\r\n ].join('\\n')\r\n );\r\n});\r\n\r\ntest('01 Tokenizer degenerate cases', () => {\r\n matchSnapshot('/***/');\r\n\r\n matchSnapshot(['/**', ' *', ' */'].join('\\n'));\r\n\r\n matchSnapshot(['/**', ' ', ' ', ' */'].join('\\n'));\r\n});\r\n\r\ntest('02 Backslash escapes: positive examples', () => {\r\n matchSnapshot(['/**', ' * \\\\$\\\\@param', ' * double-backslash: \\\\\\\\', ' */'].join('\\n'));\r\n});\r\n\r\ntest('03 Backslash escapes: negative examples', () => {\r\n matchSnapshot(['/**', ' * letter: \\\\A space: \\\\ end of line: \\\\', ' */'].join('\\n'));\r\n});\r\n\r\ntest('04 General characters', () => {\r\n matchSnapshot(['/**', ' * !\"#$%&\\'()*+,-./:;<=>?@[]^_`{|}~', ' */'].join('\\n'));\r\n});\r\n\r\ntest('05 Spacing characters', () => {\r\n matchSnapshot(['/**', ' * space: tab: \\t form feed: \\f end', ' */'].join('\\n'));\r\n});\r\n"]}
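The listing above is recovered from the map's sourcesContent array, which stores each original source alongside its path in sources. A minimal sketch of doing that extraction with Node.js follows; the file path and the IRawSourceMap shape are assumptions covering only the fields used here, not a full source-map schema.

// Sketch: dump the embedded sources from a version-3 source map file.
import * as fs from 'fs';

interface IRawSourceMap {
  version: number;
  file: string;
  sources: string[];
  sourcesContent?: string[];
}

const rawMap: IRawSourceMap = JSON.parse(fs.readFileSync('Tokenizer.test.js.map', 'utf8'));

// Each entry in "sources" lines up with the same index in "sourcesContent".
rawMap.sources.forEach((sourcePath: string, i: number) => {
  const content: string | undefined = rawMap.sourcesContent?.[i];
  console.log('--- ' + sourcePath + ' ---');
  console.log(content ?? '(no embedded content)');
});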