'use strict';

const parseAttr = require('md-attr-parser');
const htmlElemAttr = require('html-element-attributes');
const isWhiteSpace = require('is-whitespace-character');
const DOMEventHandler = require('./dom-event-handler.js');

const supportedElements = new Set(['link', 'atxHeading', 'strong', 'emphasis', 'deletion', 'code', 'setextHeading', 'fencedCode', 'reference', 'footnoteCall']);
const blockElements = new Set(['atxHeading', 'setextHeading']);
const particularElements = new Set(['fencedCode']);

const particularTokenize = {};

/* Conversion table between mdast node type and HTML tag name */
const convTypeTag = {
  image: 'img',
  link: 'a',
  heading: 'h1',
  strong: 'strong',
  emphasis: 'em',
  delete: 's',
  inlineCode: 'code',
  code: 'code',
  linkReference: 'a',
  '*': '*',
};

/* This generic function transforms the tokenizer of a node type
 * into a version that understands attributes.
 *
 * For example, the tokenizer of strong parses **STRONG STRING**;
 * this function extends it to parse **STRONG STRING**{list=of attributes}.
 *
 * - The prefix is '\n' for block nodes and '' for inline ones.
 *
 * For atxHeading the syntax is:
 *   ## HEAD TITLE
 *   {attributes}
 *
 * Attributes are on the next line.
 *
 * - oldParser is the original function used to tokenize the node.
 * - config is the configuration of this plugin.
 */
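/* A sketch of the intended behavior, assuming the default 'extended' scope
 * (global attributes such as 'style' pass the filter):
 *
 *   Inline:   *dogs*{style="color: red"}
 *     -> the emphasis node gets data.hProperties = {style: 'color: red'}
 *
 *   Block:    ## Heading
 *             {style="color: red"}
 *     -> the heading node gets data.hProperties = {style: 'color: red'}
 */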
function tokenizeGenerator(prefix, oldParser, config) {
  function token(eat, value, silent) {
    // Here we call the old tokenizer
    const self = this;
    let eaten = oldParser.call(self, eat, value, silent);

    let index = 0;
    let parsedAttr;
    const {length} = value;

    if (!eaten || !eaten.position) {
      return undefined;
    }

    const type = convTypeTag[eaten.type];

    index = eaten.position.end.offset - eaten.position.start.offset;

    // Then we check for attributes
    if (index + prefix.length < length && value.charAt(index + prefix.length) === '{') {
      // If any, parse them
      parsedAttr = parseAttr(value, index + prefix.length, config.mdAttrConfig);
    }

    // If attributes were parsed, configure the node
    if (parsedAttr) {
      if (config.scope && config.scope !== 'none') {
        const filteredProp = filterAttributes(parsedAttr.prop, config, type);
        // Only attach hProperties when at least one attribute survived filtering
        if (Object.keys(filteredProp).length > 0) {
          if (eaten.data) {
            eaten.data.hProperties = filteredProp;
          } else {
            eaten.data = {hProperties: filteredProp};
          }
        }
      }

      eaten = eat(prefix + parsedAttr.eaten)(eaten);
    }

    return eaten;
  }

  // Return the new tokenizer function
  return token;
}

function tokenizeModifierGenerator(oldParser, config) {
  function token(eat, value, silent) {
    // Here we call the old tokenizer
    const self = this;
    const eaten = oldParser.call(self, eat, value, silent);

    let index = 0;

    if (!eaten || !eaten.position ||
        !eaten.children || eaten.children.length <= 0) {
      return eaten;
    }

    const type = convTypeTag[eaten.type];

    const lastChild = eaten.children[eaten.children.length - 1];

    if (!lastChild.value || lastChild.value.length <= 0 ||
        lastChild.value[lastChild.value.length - 1] !== '}') {
      return eaten;
    }

    index = lastChild.value.lastIndexOf('{');

    if (index <= 0) {
      return eaten;
    }

    const parsedAttr = parseAttr(lastChild.value, index, config.mdAttrConfig);

    // The attribute block must run to the end of the last child
    if (parsedAttr.eaten.length !== lastChild.value.length - index) {
      return eaten;
    }

    // Skip the whitespace between the text and the attribute block
    index -= 1;
    while (index >= 0 && isWhiteSpace(lastChild.value[index])) {
      index -= 1;
    }

    if (index < 0) {
      return eaten;
    }

    // If attributes were parsed, configure the node
    if (parsedAttr) {
      if (config.scope && config.scope !== 'none') {
        const filteredProp = filterAttributes(parsedAttr.prop, config, type);
        // Only attach hProperties when at least one attribute survived filtering
        if (Object.keys(filteredProp).length > 0) {
          if (eaten.data) {
            eaten.data.hProperties = filteredProp;
          } else {
            eaten.data = {hProperties: filteredProp};
          }
        }
      }

      // Remove the attribute block from the heading text
      lastChild.value = lastChild.value.slice(0, index + 1);
    }

    return eaten;
  }

  // Return the new tokenizer function
  return token;
}

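/* A sketch of what tokenizeModifierGenerator handles, assuming the
 * enableAtxHeaderInline option (attributes on the same line as the heading):
 *
 *   ### Heading {style="color: blue"}
 *     -> the heading node gets data.hProperties = {style: 'color: blue'},
 *        and ' {style="color: blue"}' is stripped from the heading text.
 */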
// A generic function to filter attributes against the configured scope
function filterAttributes(prop, config, type) {
  const {scope, extend, allowDangerousDOMEventHandlers} = config;
  const specific = htmlElemAttr;

  // Translate the extend configuration from mdast types to HTML tag names
  const extendTag = (extend => {
    const t = {};
    Object.getOwnPropertyNames(extend).forEach(p => {
      t[convTypeTag[p]] = extend[p];
    });
    return t;
  })(extend);

  // Normalize the values of attributes other than key/class/id to strings
  Object.getOwnPropertyNames(prop).forEach(p => {
    if (p !== 'key' && p !== 'class' && p !== 'id') {
      prop[p] = prop[p] || '';
    }
  });

  const isDangerous = p => DOMEventHandler.includes(p);
  const isSpecific = p => type in specific && specific[type].includes(p);
  // Global attributes, plus aria-* and data-* attributes
  const isGlobal = p => htmlElemAttr['*'].includes(p) || p.match(/^aria-[a-z][a-z._\d-]*$/) || p.match(/^data-[a-z][a-z._\d-]*$/);

  let inScope = () => false;

  // Combine two predicates with a logical OR
  const orFunc = (fun, fun2) => x => fun(x) || fun2(x);

  // Respect the scope configuration
  switch (scope) {
    case 'none': // Plugin is disabled
      break;
    case 'permissive':
    case 'every':
      if (allowDangerousDOMEventHandlers) {
        inScope = () => true;
      } else {
        inScope = x => !isDangerous(x);
      }

      break;
    case 'extended':
    default:
      inScope = p => extendTag && type in extendTag && extendTag[type].includes(p);
      inScope = orFunc(inScope, p => '*' in extendTag && extendTag['*'].includes(p));
      // Or if it's in the specific scope, fall through
    case 'specific':
      inScope = orFunc(inScope, isSpecific);
      // Or if it's in the global scope, fall through
    case 'global':
      inScope = orFunc(inScope, isGlobal);
      if (allowDangerousDOMEventHandlers) { // If allowed, add dangerous attributes to the global scope
        inScope = orFunc(inScope, isDangerous);
      }
  }

  // If an attribute isn't in scope, delete it
  Object.getOwnPropertyNames(prop).forEach(p => {
    if (!inScope(p)) {
      delete prop[p];
    }
  });

  return prop;
}

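/* A sketch of how filterAttributes behaves, with hypothetical input
 * (the attribute names and config are illustrative):
 *
 *   filterAttributes({style: 'color: red', onload: 'alert(1)'},
 *                    {scope: 'global', extend: {}, allowDangerousDOMEventHandlers: false},
 *                    'img')
 *     -> {style: 'color: red'}   // 'onload' is a DOM event handler, dropped
 */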
/* This is a special modification of the function tokenizeGenerator
 * to parse the fencedCode info string, with a fallback to the
 * customAttr parser.
 *
 * It's only temporary.
 */
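/* A sketch of the targeted syntax (the attribute is illustrative):
 *
 *   ```js {style="color: gray"}
 *   console.log('hello');
 *   ```
 *
 *   -> with a recent remark, eaten.lang === 'js' and
 *      eaten.meta === '{style="color: gray"}', which is parsed below.
 */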
function tokenizeFencedCode(oldParser, config) {
  const prefix = '\n';
  function token(eat, value, silent) {
    // Here we call the old tokenizer
    const self = this;
    let eaten = oldParser.call(self, eat, value, silent);

    let parsedAttr;
    const parsedByCustomAttr = false;

    if (!eaten || !eaten.position) {
      return undefined;
    }

    const type = convTypeTag[eaten.type];

    // First, parse the info string,
    // which remark exposes as the 'lang' and 'meta' attributes of 'eaten'.

    if (eaten.lang) {
      // Then the meta
      if (eaten.meta) {
        parsedAttr = parseAttr(eaten.meta);
      } else {
        // If it's an old remark version, we can still find the attributes
        // in 'value' ¯\_(ツ)_/¯
        // Bad hack, will be deleted soon
        parsedAttr = parseAttr(value, value.indexOf(' '));
      }
    }

    // If attributes were parsed, configure the node
    if (parsedAttr) {
      if (config.scope && config.scope !== 'none') {
        const filteredProp = filterAttributes(parsedAttr.prop, config, type);

        // Only attach hProperties when at least one attribute survived filtering
        if (Object.keys(filteredProp).length > 0) {
          if (eaten.data) {
            eaten.data.hProperties = {...eaten.data.hProperties, ...filteredProp};
          } else {
            eaten.data = {hProperties: filteredProp};
          }
        }
      }

      if (parsedByCustomAttr) {
        eaten = eat(prefix + parsedAttr.eaten)(eaten);
      }
    }

    return eaten;
  }

  // Return the new tokenizer function
  return token;
}

particularTokenize.fencedCode = tokenizeFencedCode;

remarkAttr.SUPPORTED_ELEMENTS = supportedElements;

module.exports = remarkAttr;

/* Function that is exported */
function remarkAttr(userConfig) {
  const parser = this.Parser;

  const defaultConfig = {
    allowDangerousDOMEventHandlers: false,
    elements: supportedElements,
    extend: {},
    scope: 'extended',
    mdAttrConfig: undefined,
    enableAtxHeaderInline: true,
    disableBlockElements: false,
  };
  const config = {...defaultConfig, ...userConfig};

  if (!isRemarkParser(parser)) {
    throw new Error('Missing parser to attach `remark-attr` [link] (to)');
  }

  const tokenizers = parser.prototype.inlineTokenizers;
  const tokenizersBlock = parser.prototype.blockTokenizers;

  // For each element, replace the old tokenizer with the new one
  config.elements.forEach(element => {
    if ((element in tokenizersBlock || element in tokenizers) &&
        supportedElements.has(element)) {
      if (!config.disableBlockElements && blockElements.has(element)) {
        const oldElement = tokenizersBlock[element];
        tokenizersBlock[element] = tokenizeGenerator('\n', oldElement, config);
      } else if (particularElements.has(element)) {
        const oldElement = tokenizersBlock[element];
        tokenizersBlock[element] = particularTokenize[element](oldElement, config);
      } else {
        const oldElement = tokenizers[element];
        const elementTokenize = tokenizeGenerator('', oldElement, config);
        // Inline tokenizers need their locator preserved
        elementTokenize.locator = tokenizers[element].locator;
        tokenizers[element] = elementTokenize;
      }

      if (config.enableAtxHeaderInline && element === 'atxHeading') {
        const oldElement = tokenizersBlock[element];
        tokenizersBlock[element] = tokenizeModifierGenerator(oldElement, config);
      }
    }
  });
}

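/* Usage sketch (assuming remark and a compiler such as remark-html are
 * installed; the pipeline below is illustrative, not part of this module):
 *
 *   const remark = require('remark');
 *   const html = require('remark-html');
 *   const attr = require('remark-attr');
 *
 *   remark()
 *     .use(attr, {scope: 'permissive'})
 *     .use(html)
 *     .process('*dogs*{style="color: red"}')
 *     .then(vfile => console.log(String(vfile)));
 */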
function isRemarkParser(parser) {
  return Boolean(
    parser &&
    parser.prototype &&
    parser.prototype.inlineTokenizers &&
    parser.prototype.inlineTokenizers.link &&
    parser.prototype.inlineTokenizers.link.locator,
  );
}
356