'use strict';

const parseAttr = require('md-attr-parser');
const htmlElemAttr = require('html-element-attributes');

const supportedElements = ['link', 'atxHeading', 'strong', 'emphasis', 'deletion', 'code', 'setextHeading', 'fencedCode'];
const blockElements = ['atxHeading', 'setextHeading'];
const particularElements = ['fencedCode'];

const particularTokenize = {};

const DOMEventHandler = require('./dom-event-handler.js');

/* Conversion table between node type and HTML tag name */
const convTypeTag = {
  image: 'img',
  link: 'a',
  heading: 'h1',
  strong: 'strong',
  emphasis: 'em',
  delete: 's',
  inlineCode: 'code',
  code: 'code',
  '*': '*',
};

/* This is a generic function that transforms the tokenizer function
 * of a node type into a version that understands attributes.
 *
 * The tokenizer function of strong will tokenize **STRONG STRING**;
 * this function extends it to tokenize **STRONG STRING**{list=of attributes}.
 *
 * - The prefix is '\n' for block nodes and '' for inline ones.
 *
 * For atxHeading the syntax is ::
 *   ## HEAD TITLE
 *   {attributes}
 *
 * Attributes are on the next line.
 *
 * - The oldParser is the old function used to tokenize
 * - The config is the configuration of this plugin
 */
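/* For illustration only (this comment is not part of the tokenizing logic),
 * the attribute syntax handled here looks roughly like:
 *
 *   *emphasis*{style="color: red"}     <- inline node, prefix ''
 *
 *   ## Title                           <- block node, prefix '\n'
 *   {#intro .lead}
 *
 * The attribute list itself ({key=value .class #id}) is parsed by
 * md-attr-parser; its exact grammar is defined by that package.
 */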
function tokenizeGenerator(prefix, oldParser, config) {
  function token(eat, value, silent) {
    // Here we call the old tokenizer
    const self = this;
    let eaten = oldParser.call(self, eat, value, silent);

    let index = 0;
    let parsedAttr;
    const {length} = value;

    if (!eaten || !eaten.position) {
      return undefined;
    }

    const type = convTypeTag[eaten.type];

    index = eaten.position.end.offset - eaten.position.start.offset;

    // Then we check for attributes
    if (index + prefix.length < length && value.charAt(index + prefix.length) === '{') {
      // If any, parse them
      parsedAttr = parseAttr(value, index + prefix.length, config.mdAttrConfig);
    }

    // If attributes were parsed, configure the node
    if (parsedAttr) {
      if (config.scope && config.scope !== 'none') {
        const filteredProp = filterAttributes(parsedAttr.prop, config, type);
        if (Object.keys(filteredProp).length > 0) {
          if (eaten.data) {
            eaten.data.hProperties = filteredProp;
          } else {
            eaten.data = {hProperties: filteredProp};
          }
        }
      }

      eaten = eat(prefix + parsedAttr.eaten)(eaten);
    }

    return eaten;
  }

  // Return the new tokenizer function
  return token;
}
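/* Illustrative sketch of the result (not executed here): for markdown like
 * `*Wait*{style="color: orange"}`, the wrapped emphasis tokenizer should
 * return roughly
 *
 *   {
 *     type: 'emphasis',
 *     children: [{type: 'text', value: 'Wait'}],
 *     data: {hProperties: {style: 'color: orange'}}
 *   }
 *
 * and remark-rehype then renders <em style="color: orange">Wait</em>.
 */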

// A generic function that filters attributes against the configured scope
function filterAttributes(prop, config, type) {
  const {scope} = config;
  const {extend} = config;
  const {allowDangerousDOMEventHandlers} = config;
  const specific = htmlElemAttr;

  const extendTag = (extend => {
    const t = {};
    Object.getOwnPropertyNames(extend).forEach(p => {
      t[convTypeTag[p]] = extend[p];
    });
    return t;
  })(extend);

  // Replace falsy values with an empty string, except for key/class/id
  Object.getOwnPropertyNames(prop).forEach(p => {
    if (p !== 'key' && p !== 'class' && p !== 'id') {
      prop[p] = prop[p] || '';
    }
  });

  const isDangerous = p => DOMEventHandler.indexOf(p) >= 0;
  const isSpecific = p => type in specific && specific[type].indexOf(p) >= 0;
  const isGlobal = p => htmlElemAttr['*'].indexOf(p) >= 0 || p.match(/^aria-[a-z]{3,24}$/);

  let inScope = _ => false;

  // Function used to combine two predicates with a logical OR
  const orFunc = (fun, fun2) => x => fun(x) || fun2(x);

  // Respect the scope configuration
  switch (scope) {
    case 'none': // Plugin is disabled
      break;
    case 'permissive':
    case 'every':
      if (allowDangerousDOMEventHandlers) {
        inScope = _ => true;
      } else {
        inScope = x => !isDangerous(x);
      }

      break;
    case 'extended':
    default:
      inScope = p => extendTag && type in extendTag && extendTag[type].indexOf(p) >= 0;
      inScope = orFunc(inScope, p => '*' in extendTag && extendTag['*'].indexOf(p) >= 0);
      // Or if it's in the specific scope (fall through)
    case 'specific':
      inScope = orFunc(inScope, isSpecific);
      // Or if it's in the global scope (fall through)
    case 'global':
      inScope = orFunc(inScope, isGlobal);
      if (allowDangerousDOMEventHandlers) { // If allowed, add dangerous attributes to the global scope
        inScope = orFunc(inScope, isDangerous);
      }
  }

  // If an attribute isn't in the scope, delete it
  Object.getOwnPropertyNames(prop).forEach(p => {
    if (!inScope(p)) {
      delete prop[p];
    }
  });

  return prop;
}
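/* Illustrative example (the attribute lists below are assumptions about the
 * contents of html-element-attributes and dom-event-handler.js): with the
 * default 'extended' scope and an empty `extend`,
 *
 *   filterAttributes({id: 'intro', href: '/x', onload: 'alert(1)'}, config, 'a')
 *
 * should keep `id` (a global attribute) and `href` (specific to <a>), and
 * drop `onload`, since dangerous DOM event handlers only pass when
 * allowDangerousDOMEventHandlers is set.
 */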

/* This is a special modification of tokenizeGenerator
 * that parses the fencedCode info string, falling back to the
 * customAttr parser.
 *
 * It's only temporary.
 */
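/* For illustration, the fenced-code syntax handled here looks roughly like:
 *
 *   ```python {style="background: tomato"}
 *   print('hello')
 *   ```
 *
 * where `python` ends up in eaten.lang and the rest of the info string in
 * eaten.meta; the exact attribute form accepted is whatever md-attr-parser
 * understands.
 */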
function tokenizeFencedCode(oldParser, config) {
  const prefix = '\n';
  function token(eat, value, silent) {
    // Here we call the old tokenizer
    const self = this;
    let eaten = oldParser.call(self, eat, value, silent);

    let parsedAttr;
    const parsedByCustomAttr = false;

    if (!eaten || !eaten.position) {
      return undefined;
    }

    const type = convTypeTag[eaten.type];

    // First, parse the info string,
    // which is stored in the 'lang' and 'meta' fields of 'eaten'.

    if (eaten.lang) {
      // Then the meta
      if (eaten.meta) {
        parsedAttr = parseAttr(eaten.meta);
      } else {
        // If it's an old version, we can still recover the attributes
        // from 'value' ¯\_(ツ)_/¯
        // Bad hack, will be deleted soon
        parsedAttr = parseAttr(value, value.indexOf(' '));
      }
    }

    // If attributes were parsed, configure the node
    if (parsedAttr) {
      if (config.scope && config.scope !== 'none') {
        const filteredProp = filterAttributes(parsedAttr.prop, config, type);

        if (Object.keys(filteredProp).length > 0) {
          if (eaten.data) {
            eaten.data.hProperties = {...eaten.data.hProperties, ...filteredProp};
          } else {
            eaten.data = {hProperties: filteredProp};
          }
        }
      }

      if (parsedByCustomAttr) {
        eaten = eat(prefix + parsedAttr.eaten)(eaten);
      }
    }

    return eaten;
  }

  // Return the new tokenizer function
  return token;
}

particularTokenize.fencedCode = tokenizeFencedCode;

remarkAttr.SUPPORTED_ELEMENTS = supportedElements;

module.exports = remarkAttr;

/* Function that is exported */
function remarkAttr(userConfig) {
  const parser = this.Parser;

  const defaultConfig = {
    allowDangerousDOMEventHandlers: false,
    elements: supportedElements,
    extend: {},
    scope: 'extended',
    mdAttrConfig: undefined,
  };
  const config = {...defaultConfig, ...userConfig};

  if (!isRemarkParser(parser)) {
    throw new Error('Missing parser to attach `remark-attr` to');
  }

  const tokenizers = parser.prototype.inlineTokenizers;
  const tokenizersBlock = parser.prototype.blockTokenizers;

  // For each element, replace the old tokenizer with the new one
  config.elements.forEach(elem => {
    if (supportedElements.indexOf(elem) >= 0) {
      if (blockElements.indexOf(elem) >= 0) {
        const oldElem = tokenizersBlock[elem];
        tokenizersBlock[elem] = tokenizeGenerator('\n', oldElem, config);
      } else if (particularElements.indexOf(elem) >= 0) {
        const oldElem = tokenizersBlock[elem];
        tokenizersBlock[elem] = particularTokenize[elem](oldElem, config);
      } else {
        const oldElem = tokenizers[elem];
        const elemTokenize = tokenizeGenerator('', oldElem, config);
        elemTokenize.locator = tokenizers[elem].locator;
        tokenizers[elem] = elemTokenize;
      }
    }
  });
}
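/* A minimal usage sketch (assumes the unified, remark-parse, remark-rehype
 * and rehype-stringify packages are installed; not part of this module):
 *
 *   const unified = require('unified');
 *   const remarkParse = require('remark-parse');
 *   const remark2rehype = require('remark-rehype');
 *   const stringify = require('rehype-stringify');
 *   const remarkAttr = require('remark-attr');
 *
 *   unified()
 *     .use(remarkParse)
 *     .use(remarkAttr)             // e.g. {scope: 'permissive'} to widen the scope
 *     .use(remark2rehype)
 *     .use(stringify)
 *     .process('*Wait*{style="color: orange"}')
 *     .then(file => console.log(String(file)));
 *
 *   // Expected output, roughly: <p><em style="color: orange">Wait</em></p>
 */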

function isRemarkParser(parser) {
  return Boolean(
    parser &&
    parser.prototype &&
    parser.prototype.inlineTokenizers &&
    parser.prototype.inlineTokenizers.link &&
    parser.prototype.inlineTokenizers.link.locator
  );
}