/**
 * @license
 * Copyright (c) 2016 The Polymer Project Authors. All rights reserved.
 * This code may only be used under the BSD style license found at
 * http://polymer.github.io/LICENSE.txt The complete set of authors may be found
 * at http://polymer.github.io/AUTHORS.txt The complete set of contributors may
 * be found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by
 * Google as part of the polymer project is also subject to an additional IP
 * rights grant found at http://polymer.github.io/PATENTS.txt
 */
import { AtRule, Comment, Declaration, Discarded, Rule, Rulelist, Ruleset, Stylesheet } from './common';
import { NodeFactory } from './node-factory';
import { Tokenizer } from './tokenizer';

/**
 * Class that implements a shady CSS parser.
 */
declare class Parser {
  nodeFactory: NodeFactory;

  /**
   * Create a Parser instance. When creating a Parser instance, a specialized
   * NodeFactory can be supplied to implement streaming analysis and
   * manipulation of the CSS AST.
   */
  constructor(nodeFactory?: NodeFactory);

  /**
   * Parse CSS and generate an AST.
   * @param cssText The CSS to parse.
   * @return A CSS AST containing nodes that correspond to those
   *   generated by the Parser's NodeFactory.
   */
  parse(cssText: string): Stylesheet;

  /**
   * Consumes tokens from a Tokenizer to parse a Stylesheet node.
   * @param tokenizer A Tokenizer instance.
   */
  parseStylesheet(tokenizer: Tokenizer): Stylesheet;

  /**
   * Consumes tokens from a Tokenizer to parse a sequence of rules.
   * @param tokenizer A Tokenizer instance.
   * @return A list of nodes corresponding to rules. For a parser
   *   configured with a basic NodeFactory, any of Comment, AtRule, Ruleset,
   *   Declaration and Discarded nodes may be present in the list.
   */
  parseRules(tokenizer: Tokenizer): Rule[];

  /**
   * Consumes tokens from a Tokenizer to parse a single rule.
   * @param tokenizer A Tokenizer instance.
   * @return If the current token in the Tokenizer is whitespace,
   *   returns null. Otherwise, returns the next parseable node.
   */
  parseRule(tokenizer: Tokenizer): Rule | null;

  /**
   * Consumes tokens from a Tokenizer to parse a Comment node.
   * @param tokenizer A Tokenizer instance.
   */
  parseComment(tokenizer: Tokenizer): Comment | null;

  /**
   * Consumes tokens from a Tokenizer through the next boundary token to
   * produce a Discarded node. This supports graceful recovery from many
   * malformed CSS conditions.
   * @param tokenizer A Tokenizer instance.
   */
  parseUnknown(tokenizer: Tokenizer): Discarded | null;

  /**
   * Consumes tokens from a Tokenizer to parse an At Rule node.
   * @param tokenizer A Tokenizer instance.
   */
  parseAtRule(tokenizer: Tokenizer): AtRule | null;

  /**
   * Consumes tokens from a Tokenizer to produce a Rulelist node.
   * @param tokenizer A Tokenizer instance.
   */
  parseRulelist(tokenizer: Tokenizer): Rulelist;

  /**
   * Consumes tokens from a Tokenizer instance to produce a Declaration node or
   * a Ruleset node, as appropriate.
   * @param tokenizer A Tokenizer instance.
   */
  parseDeclarationOrRuleset(tokenizer: Tokenizer): Declaration | Ruleset | null;
}

export { Parser };
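
// Usage sketch (not part of the original declarations; the import path and the
// sample CSS below are illustrative assumptions). A Parser constructed with the
// default NodeFactory turns CSS text into a Stylesheet AST in a single call;
// passing a specialized NodeFactory to the constructor customizes the nodes
// produced while the stylesheet streams through the parser.
//
//   import { Parser } from './parser';
//
//   const parser = new Parser();
//   const ast = parser.parse('.fancy { color: rebeccapurple; }');
//   // `ast` is a Stylesheet node produced by the Parser's NodeFactory.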