'use strict';

// Parser for the `{.class #id key=value}` attribute syntax.
const parseAttr = require('md-attr-parser');
// Map of HTML tag name -> array of tag-specific attributes ('*' = global).
const htmlElemAttr = require('html-element-attributes');
const isWhiteSpace = require('is-whitespace-character');

// Mdast tokenizer names this plugin knows how to wrap.
const supportedElements = new Set(['link', 'atxHeading', 'strong', 'emphasis', 'deletion', 'code', 'setextHeading', 'fencedCode', 'reference', 'footnoteCall']);
// Subset wrapped with the block-level generator (prefix '\n').
const blockElements = new Set(['atxHeading', 'setextHeading']);
// Elements with a dedicated tokenizer factory (see `particularTokenize`).
const particularElements = new Set(['fencedCode']);

// Element name -> special tokenizer factory; populated further down.
const particularTokenize = {};

// Array of DOM event-handler attribute names, used (via `.includes`) to
// flag dangerous attributes in `filterAttributes`.
const DOMEventHandler = require('./dom-event-handler.js');

// Translates mdast node types to the HTML tag name used when scoping
// attributes ('*' matches any tag).
const convTypeTag = {
  image: 'img',
  link: 'a',
  heading: 'h1',
  strong: 'strong',
  emphasis: 'em',
  delete: 's',
  inlineCode: 'code',
  code: 'code',
  linkReference: 'a',
  '*': '*',
};
|
28 |
|
29 |
|
30 |
|
31 |
|
32 |
|
33 |
|
34 |
|
35 |
|
36 |
|
37 |
|
38 |
|
39 |
|
40 |
|
41 |
|
42 |
|
43 |
|
44 |
|
45 |
|
46 |
|
47 |
|
/**
 * Wraps a remark tokenizer so that an attribute block written directly
 * after the construct (e.g. `**bold**{.class}`) is parsed, consumed and
 * attached to the node as `hProperties`.
 *
 * @param {string} prefix - Text to re-eat before the attribute string
 *   ('\n' for block constructs, '' for inline ones).
 * @param {Function} oldParser - The original remark tokenizer to wrap.
 * @param {Object} config - Plugin configuration (`scope`, `mdAttrConfig`, …).
 * @returns {Function} A tokenizer with the same signature as `oldParser`.
 */
function tokenizeGenerator(prefix, oldParser, config) {
  function token(eat, value, silent) {
    const self = this;
    let eaten = oldParser.call(self, eat, value, silent);

    let index = 0;
    let parsedAttr;
    const {length} = value;

    if (!eaten || !eaten.position) {
      return undefined;
    }

    const type = convTypeTag[eaten.type];

    // Offset just past the construct the wrapped tokenizer consumed.
    index = eaten.position.end.offset - eaten.position.start.offset;

    // An attribute block must start with `{` immediately after the construct.
    if (index + prefix.length < length && value.charAt(index + prefix.length) === '{') {
      parsedAttr = parseAttr(value, index + prefix.length, config.mdAttrConfig);
    }

    if (parsedAttr) {
      if (config.scope && config.scope !== 'none') {
        const filtredProp = filterAttributes(parsedAttr.prop, config, type);
        // Attach hProperties only when at least one attribute survived the
        // filter. (The previous check `filtredProp !== {}` compared object
        // identities and was therefore always true.)
        if (Object.keys(filtredProp).length > 0) {
          if (eaten.data) {
            eaten.data.hProperties = filtredProp;
          } else {
            eaten.data = {hProperties: filtredProp};
          }
        }
      }

      // Consume the attribute text so it does not leak into the output.
      eaten = eat(prefix + parsedAttr.eaten)(eaten);
    }

    return eaten;
  }

  return token;
}
|
94 |
|
/**
 * Wraps a block tokenizer (ATX headings) so that a trailing attribute block
 * inside the construct's own text (e.g. `# Title {#id}`) is parsed, stripped
 * from the last child's value and attached to the node as `hProperties`.
 *
 * @param {Function} oldParser - The original remark tokenizer to wrap.
 * @param {Object} config - Plugin configuration (`scope`, `mdAttrConfig`, …).
 * @returns {Function} A tokenizer with the same signature as `oldParser`.
 */
function tokenizeModifierGenerator(oldParser, config) {
  function token(eat, value, silent) {
    const self = this;
    const eaten = oldParser.call(self, eat, value, silent);

    let index = 0;

    if (!eaten || !eaten.position ||
        !eaten.children || eaten.children.length <= 0) {
      return eaten;
    }

    const type = convTypeTag[eaten.type];

    const lastChild = eaten.children[eaten.children.length - 1];

    // The attribute block must terminate the heading text.
    if (!lastChild.value || lastChild.value.length <= 0 ||
        lastChild.value[lastChild.value.length - 1] !== '}') {
      return eaten;
    }

    index = lastChild.value.lastIndexOf('{');

    if (index <= 0) {
      return eaten;
    }

    const parsedAttr = parseAttr(lastChild.value, index, config.mdAttrConfig);

    // The parsed attributes must span exactly to the end of the text,
    // otherwise the `{…}` was not a trailing attribute block.
    if (parsedAttr.eaten.length !== lastChild.value.length - index) {
      return eaten;
    }

    // Walk back over the whitespace separating the text from the attributes.
    index -= 1;
    while (index >= 0 && isWhiteSpace(lastChild.value[index])) {
      index -= 1;
    }

    // Attribute block with no preceding text: leave the node alone.
    if (index < 0) {
      return eaten;
    }

    if (parsedAttr) {
      if (config.scope && config.scope !== 'none') {
        const filtredProp = filterAttributes(parsedAttr.prop, config, type);
        // `Object.keys(...).length` replaces the former `filtredProp !== {}`,
        // which compared object identities and was always true.
        if (Object.keys(filtredProp).length > 0) {
          if (eaten.data) {
            eaten.data.hProperties = filtredProp;
          } else {
            eaten.data = {hProperties: filtredProp};
          }
        }
      }

      // Drop the attribute text (and the separating whitespace).
      lastChild.value = lastChild.value.slice(0, index + 1);
    }

    return eaten;
  }

  return token;
}
|
160 |
|
161 |
|
/**
 * Filters a parsed attribute set in place according to the configured scope,
 * the target HTML tag and the dangerous-DOM-event-handler policy.
 *
 * @param {Object} prop - Attributes parsed by md-attr-parser (mutated).
 * @param {Object} config - Plugin configuration (`scope`, `extend`,
 *   `allowDangerousDOMEventHandlers`).
 * @param {string} type - Target HTML tag name (e.g. 'a', 'h1').
 * @returns {Object} The same `prop` object, with rejected keys deleted.
 */
function filterAttributes(prop, config, type) {
  const {scope} = config;
  const {extend} = config;
  const {allowDangerousDOMEventHandlers} = config;
  const specific = htmlElemAttr;

  // Translate the user-facing `extend` keys (mdast node types) to tag names.
  const extendTag = (extend => {
    const t = {};
    Object.getOwnPropertyNames(extend).forEach(p => {
      t[convTypeTag[p]] = extend[p];
    });
    return t;
  })(extend);

  // Normalize value-less attributes to an empty string (the special keys
  // are left as md-attr-parser produced them).
  Object.getOwnPropertyNames(prop).forEach(p => {
    if (p !== 'key' && p !== 'class' && p !== 'id') {
      prop[p] = prop[p] || '';
    }
  });

  const isDangerous = p => DOMEventHandler.includes(p);
  const isSpecific = p => type in specific && specific[type].includes(p);
  // `aria-*`/`data-*` names: a lowercase letter followed by lowercase
  // letters, digits, '.', '_' or '-'. (The previous character classes held
  // unintended ranges — `.-_` spanned 0x2E-0x5F and `.-0` excluded the
  // digits 1-8 — so e.g. `data-x1` was wrongly rejected.)
  const isGlobal = p => htmlElemAttr['*'].includes(p) ||
    p.match(/^aria-[a-z][a-z._\d-]*$/) ||
    p.match(/^data-[a-z][a-z._\d-]*$/);

  let inScope = () => false;

  // Composes two predicates with a logical OR.
  const orFunc = (fun, fun2) => x => fun(x) || fun2(x);

  // Each scope widens the previous one, so the cases below fall through
  // deliberately ('extended' ⊃ 'specific' ⊃ 'global').
  switch (scope) {
    case 'none':
      break;
    case 'permissive':
    case 'every':
      if (allowDangerousDOMEventHandlers) {
        inScope = () => true;
      } else {
        inScope = x => !isDangerous(x);
      }

      break;
    case 'extended':
    default:
      inScope = p => extendTag && type in extendTag && extendTag[type].includes(p);
      inScope = orFunc(inScope, p => '*' in extendTag && extendTag['*'].includes(p));
      // Fall through.
    case 'specific':
      inScope = orFunc(inScope, isSpecific);
      // Fall through.
    case 'global':
      inScope = orFunc(inScope, isGlobal);
      if (allowDangerousDOMEventHandlers) {
        inScope = orFunc(inScope, isDangerous);
      }
  }

  // Delete every attribute the composed predicate rejects.
  Object.getOwnPropertyNames(prop).forEach(p => {
    if (!inScope(p)) {
      delete prop[p];
    }
  });

  return prop;
}
|
229 |
|
230 |
|
231 |
|
232 |
|
233 |
|
234 |
|
235 |
|
/**
 * Wraps the fencedCode block tokenizer so attributes supplied in the fence
 * info string (e.g. ```js {linenos=true}) are attached as `hProperties`.
 *
 * @param {Function} oldParser - The original fencedCode tokenizer.
 * @param {Object} config - Plugin configuration (`scope`, …).
 * @returns {Function} A tokenizer with the same signature as `oldParser`.
 */
function tokenizeFencedCode(oldParser, config) {
  const prefix = '\n';
  function token(eat, value, silent) {
    const self = this;
    let eaten = oldParser.call(self, eat, value, silent);

    let parsedAttr;
    // Attributes are only read from the info string that the wrapped
    // tokenizer already consumed, so nothing extra is ever re-eaten.
    const parsedByCustomAttr = false;

    if (!eaten || !eaten.position) {
      return undefined;
    }

    const type = convTypeTag[eaten.type];

    // Attributes are only meaningful when the fence declares a language.
    if (eaten.lang) {
      if (eaten.meta) {
        // Newer remark exposes the info string after the language as `meta`.
        parsedAttr = parseAttr(eaten.meta);
      } else {
        // Older remark: parse from the raw value, starting at the first
        // space of the info string. NOTE(review): with no space at all
        // `indexOf` yields -1 — presumably harmless since no `{` follows;
        // confirm against md-attr-parser.
        parsedAttr = parseAttr(value, value.indexOf(' '));
      }
    }

    if (parsedAttr) {
      if (config.scope && config.scope !== 'none') {
        const filtredProp = filterAttributes(parsedAttr.prop, config, type);

        // Merge only when filtering left at least one attribute. (The
        // former `filtredProp !== {}` identity check was always true.)
        if (Object.keys(filtredProp).length > 0) {
          if (eaten.data) {
            eaten.data.hProperties = {...eaten.data.hProperties, ...filtredProp};
          } else {
            eaten.data = {hProperties: filtredProp};
          }
        }
      }

      if (parsedByCustomAttr) {
        eaten = eat(prefix + parsedAttr.eaten)(eaten);
      }
    }

    return eaten;
  }

  return token;
}
|
293 |
|
// Register the fencedCode-specific tokenizer factory.
particularTokenize.fencedCode = tokenizeFencedCode;

// Expose the supported element list to consumers. (Function declarations
// are hoisted, so `remarkAttr` is usable before its definition below.)
remarkAttr.SUPPORTED_ELEMENTS = supportedElements;

module.exports = remarkAttr;
|
299 |
|
300 |
|
/**
 * The remark-attr plugin entry point. Merges the user configuration with
 * the defaults and wraps the parser's tokenizers for every requested,
 * supported element so markdown attributes get attached to the tree.
 *
 * @param {Object} [userConfig] - Partial configuration overriding defaults.
 * @throws {Error} When `this.Parser` is not a remark parser.
 */
function remarkAttr(userConfig) {
  const parser = this.Parser;

  // User-supplied keys win over the defaults.
  const config = {
    allowDangerousDOMEventHandlers: false,
    elements: supportedElements,
    extend: {},
    scope: 'extended',
    mdAttrConfig: undefined,
    enableAtxHeaderInline: true,
    disableBlockElements: false,
    ...userConfig,
  };

  if (!isRemarkParser(parser)) {
    throw new Error('Missing parser to attach `remark-attr` [link] (to)');
  }

  const inline = parser.prototype.inlineTokenizers;
  const block = parser.prototype.blockTokenizers;

  config.elements.forEach(element => {
    // Skip anything unknown to this plugin or absent from the parser.
    if (supportedElements.has(element) &&
        (element in block || element in inline)) {
      if (!config.disableBlockElements && blockElements.has(element)) {
        // Block construct: attributes sit on the following line.
        block[element] = tokenizeGenerator('\n', block[element], config);
      } else if (particularElements.has(element)) {
        // Element with a dedicated tokenizer factory (fencedCode).
        block[element] = particularTokenize[element](block[element], config);
      } else {
        // Inline construct: preserve the original locator on the wrapper.
        const wrapped = tokenizeGenerator('', inline[element], config);
        wrapped.locator = inline[element].locator;
        inline[element] = wrapped;
      }

      // Optionally also support `# Title {#id}` inside the heading text.
      if (config.enableAtxHeaderInline && element === 'atxHeading') {
        block[element] = tokenizeModifierGenerator(block[element], config);
      }
    }
  });
}
|
346 |
|
/**
 * Detects whether the given constructor looks like a remark Parser: its
 * prototype must carry inline tokenizers including a `link` tokenizer
 * that exposes a locator.
 *
 * @param {Function} parser - Candidate parser constructor.
 * @returns {boolean} True when the value matches remark's parser shape.
 */
function isRemarkParser(parser) {
  const locator = parser &&
    parser.prototype &&
    parser.prototype.inlineTokenizers &&
    parser.prototype.inlineTokenizers.link &&
    parser.prototype.inlineTokenizers.link.locator;
  return Boolean(locator);
}
|
356 |
|