1 | "use strict";
|
2 | Object.defineProperty(exports, "__esModule", { value: true });
|
3 | const util_1 = require("util");
|
4 | const Token = require("./token");
|
// Ensure the string ends with a newline so line-anchored lexer rules can
// always consume the final entry; returns the input unchanged otherwise.
const addTrailingNewline = (s) => {
    if (s.endsWith('\n')) {
        return s;
    }
    return `${s}\n`;
};
|
6 | const Lexer = require('lex');
|
// Lexer state constants (compiled-TypeScript-enum shape): a bidirectional
// name<->value map, e.g. SC.Init === 0 and SC[0] === "Init".
//   Init    — at the start of a line / outside any machine entry.
//   Machine — inside a `machine`/`default` entry, where props are valid.
var SC;
(function (states) {
    states[states["Init"] = 0] = "Init";
    states[states["Machine"] = 1] = "Machine";
})(SC || (SC = {}));
|
/**
 * Tokenize the contents of a .netrc file.
 *
 * @param {string} body - raw netrc file text.
 * @returns {Array} flat token list: plain objects for `newline`, `comment`
 *   and `macdef` entries, and Token.Machine / Token.DefaultMachine /
 *   Token.Prop instances for structured entries.
 * @throws {Error} when a character matches no rule; the message includes
 *   the offending character (via util.inspect) and the full body.
 */
function lex(body) {
    // Guarantee a trailing newline so the line-anchored rules below can
    // consume the final entry.
    body = addTrailingNewline(body);
    const tokens = [];
    // The default handler fires only when no rule matches.
    const lexer = new Lexer((char) => {
        throw new Error(`Unexpected character during netrc parsing. char: ${util_1.inspect(char)}:
${body}`);
    });
    // NOTE: callbacks that set `this.state` must remain `function`
    // expressions so `this` is bound to the lexer instance by `lex`.
    // Blank line: reset to Init state.
    lexer.addRule(/\s*\n/, function (content) {
        this.state = SC.Init;
        tokens.push({ type: 'newline', content });
    }, [SC.Init, SC.Machine]);
    // Comment line (`# ...`).
    lexer.addRule(/\s*(#.*)\n/, function (content) {
        tokens.push({ type: 'comment', content });
    }, [SC.Init, SC.Machine]);
    // `macdef` macro definition: swallows all following non-blank lines.
    lexer.addRule(/([ \t]*)macdef.*\n(.*\S.+(\n|$))*/, function (content) {
        tokens.push({ type: 'macdef', content });
    }, [SC.Init, SC.Machine]);
    // `machine <host>`: start a machine entry and switch state.
    lexer.addRule(/([ \t]*)machine +(\S+)([ \t]*\n)?/, function (_, pre, host, post) {
        this.state = SC.Machine;
        tokens.push(new Token.Machine({ host, pre, post }));
    }, [SC.Init, SC.Machine]);
    // `default`: like `machine` but without a host name.
    lexer.addRule(/([ \t]*)default([ \t]*\n)?/, function (_, pre, post) {
        this.state = SC.Machine;
        tokens.push(new Token.DefaultMachine({ pre, post }));
    }, [SC.Init, SC.Machine]);
    // `<name> <value>` property pair — only valid inside a machine entry,
    // hence the [SC.Machine]-only state list.
    lexer.addRule(/([ \t]*)([a-zA-Z]+) +(\S+)([ \t]*\n)?/, (_, pre, name, value, post) => {
        tokens.push(new Token.Prop({ pre, post, name, value }));
    }, [SC.Machine]);
    lexer.setInput(body).lex();
    return tokens;
}
|
// CommonJS interop default export (compiled from `export default lex`).
exports.default = lex;
|