@using Berp;
@helper CallProduction(ProductionRule production)
{
  switch(production.Type)
  {
    case ProductionRuleType.Start:
      @:startRule(context, '@production.RuleName');
      break;
    case ProductionRuleType.End:
      @:endRule(context, '@production.RuleName');
      break;
    case ProductionRuleType.Process:
      @:build(context, token);
      break;
  }
}
@helper HandleParserError(IEnumerable<string> expectedTokens, State state)
{<text>
    var stateComment = "State: @state.Id - @Raw(state.Comment)";
    token.detach();
    var expectedTokens = ["@Raw(string.Join("\", \"", expectedTokens))"];
    var error = token.isEof ?
      Errors.UnexpectedEOFException.create(token, expectedTokens, stateComment) :
      Errors.UnexpectedTokenException.create(token, expectedTokens, stateComment);
    if (this.stopAtFirstError) throw error;
    addError(context, error);
    return @state.Id;</text>}
@helper MatchToken(TokenType tokenType)
{<text>match_@(tokenType)(context, token)</text>}
// This file is generated. Do not edit! Edit gherkin-javascript.razor instead.
var Errors = require('./errors');
var AstBuilder = require('./ast_builder');
var TokenScanner = require('./token_scanner');
var TokenMatcher = require('./token_matcher');

var RULE_TYPES = [
  'None',
@foreach(var rule in Model.RuleSet.Where(r => !r.TempRule))
{<text>  '@rule.Name.Replace("#", "_")', // @rule.ToString(true)
</text>}
];

module.exports = function Parser(builder) {
  builder = builder || new AstBuilder();

  var context;

  this.parse = function(tokenScanner, tokenMatcher) {
    if(typeof tokenScanner == 'string') {
      tokenScanner = new TokenScanner(tokenScanner);
    }
    tokenMatcher = tokenMatcher || new TokenMatcher();
    builder.reset();
    tokenMatcher.reset();
    context = {
      tokenScanner: tokenScanner,
      tokenMatcher: tokenMatcher,
      tokenQueue: [],
      errors: []
    };
    startRule(context, 'Feature');
    var state = 0;
    var token = null;
    while(true) {
      token = readToken(context);
      state = matchToken(state, token, context);
      if(token.isEof) break;
    }

    endRule(context, 'Feature');

    if(context.errors.length > 0) {
      throw Errors.CompositeParserException.create(context.errors);
    }

    return getResult();
  };

  function addError(context, error) {
    context.errors.push(error);
    if (context.errors.length > 10)
      throw Errors.CompositeParserException.create(context.errors);
  }

  function startRule(context, ruleType) {
    handleAstError(context, function () {
      builder.startRule(ruleType);
    });
  }

  function endRule(context, ruleType) {
    handleAstError(context, function () {
      builder.endRule(ruleType);
    });
  }

  function build(context, token) {
    handleAstError(context, function () {
      builder.build(token);
    });
  }

  function getResult() {
    return builder.getResult();
  }

  function handleAstError(context, action) {
    handleExternalError(context, true, action);
  }

  function handleExternalError(context, defaultValue, action) {
    if(this.stopAtFirstError) return action();
    try {
      return action();
    } catch (e) {
      if(e instanceof Errors.CompositeParserException) {
        e.errors.forEach(function (error) {
          addError(context, error);
        });
      } else if(
        e instanceof Errors.ParserException ||
        e instanceof Errors.AstBuilderException ||
        e instanceof Errors.UnexpectedTokenException ||
        e instanceof Errors.NoSuchLanguageException
      ) {
        addError(context, e);
      } else {
        throw e;
      }
    }
    return defaultValue;
  }

  function readToken(context) {
    return context.tokenQueue.length > 0 ?
      context.tokenQueue.shift() :
      context.tokenScanner.read();
  }
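
  // readToken drains context.tokenQueue before pulling from the scanner;
  // the lookahead_* functions below push the tokens they read ahead back
  // onto that queue so no input is lost.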

  function matchToken(state, token, context) {
    switch(state) {
@foreach(var state in Model.States.Values.Where(s => !s.IsEndState))
{
    @:case @state.Id:
      @:return matchTokenAt_@(state.Id)(token, context);
}
    default:
      throw new Error("Unknown state: " + state);
    }
  }

@foreach(var state in Model.States.Values.Where(s => !s.IsEndState))
{<text>
  // @Raw(state.Comment)
  function matchTokenAt_@(state.Id)(token, context) {
    @foreach(var transition in state.Transitions)
    {
      @:if(@MatchToken(transition.TokenType)) {
      if (transition.LookAheadHint != null)
      {
        @:if(lookahead_@(transition.LookAheadHint.Id)(context, token)) {
      }
      foreach(var production in transition.Productions)
      {
        @CallProduction(production)
      }
      @:return @transition.TargetState;
      if (transition.LookAheadHint != null)
      {
        @:}
      }
      @:}
    }
    @HandleParserError(state.Transitions.Select(t => "#" + t.TokenType.ToString()).Distinct(), state)
  }
</text>}

@foreach(var rule in Model.RuleSet.TokenRules)
{<text>
  function match_@(rule.Name.Replace("#", ""))(context, token) {
    @if (rule.Name != "#EOF")
    {
      @:if(token.isEof) return false;
    }
    return handleExternalError(context, false, function () {
      return context.tokenMatcher.match_@(rule.Name.Replace("#", ""))(token);
    });
  }
</text>}

@foreach(var lookAheadHint in Model.RuleSet.LookAheadHints)
{<text>
  function lookahead_@(lookAheadHint.Id)(context, currentToken) {
    currentToken.detach();
    var token;
    var queue = [];
    var match = false;
    do {
      token = readToken(context);
      token.detach();
      queue.push(token);

      if (false @foreach(var tokenType in lookAheadHint.ExpectedTokens) {<text> || @MatchToken(tokenType)</text>}) {
        match = true;
        break;
      }
    } while(false @foreach(var tokenType in lookAheadHint.Skip) {<text> || @MatchToken(tokenType)</text>});

    context.tokenQueue = context.tokenQueue.concat(queue);

    return match;
  }
</text>}
};
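
// Usage sketch (illustrative only; it assumes the generated output of this
// template is saved as parser.js alongside the modules required above):
//
//   var Parser = require('./parser');
//   var parser = new Parser();   // uses the default AstBuilder
//   var result = parser.parse('Feature: Minimal\n\n  Scenario: one\n    Given a step\n');
//
// parse() also accepts a TokenScanner instance and an optional TokenMatcher
// in place of the source string.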