1 | "use strict";
|
// Downlevel helper emitted by the TypeScript compiler for `async` functions:
// drives a generator-based coroutine, chaining each yielded value through
// Promise resolution until the generator completes.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Wrap a plain value in the Promise implementation P (defaults to global Promise).
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        // Resume the generator with the resolved value of the last awaited expression.
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        // Throw the rejection reason back into the generator so user `catch` blocks run.
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        // Either finish (generator done) or await the next yielded value.
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
// Downlevel helper: obtains an async iterator for `for await ... of`.
// Falls back to adapting a synchronous iterator, settling each value
// through Promise.resolve.
var __asyncValues = (this && this.__asyncValues) || function (o) {
    if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
    var m = o[Symbol.asyncIterator], i;
    // Prefer the object's own async iterator; otherwise wrap its sync iterator.
    return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
    // Forward method n ("next"/"throw"/"return") through a Promise-based settle.
    function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
    function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
};
|
// Downlevel helper: marker wrapper that distinguishes internally-awaited
// values from plain `yield`s inside compiled async generators (callable
// with or without `new`).
var __await = (this && this.__await) || function (v) { return this instanceof __await ? (this.v = v, this) : new __await(v); }
|
// Downlevel helper emitted for `async function*`: drives the compiled
// generator, serializing caller requests through the queue `q` and
// unwrapping `__await`-marked values via Promises.
var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _arguments, generator) {
    if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
    var g = generator.apply(thisArg, _arguments || []), i, q = [];
    return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i;
    // Expose method n on the iterator; enqueue the request and start it if the queue was idle.
    function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }
    function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }
    // An `__await`-wrapped value is awaited internally; any other yield settles the caller's promise.
    function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }
    function fulfill(value) { resume("next", value); }
    function reject(value) { resume("throw", value); }
    // Settle the head request, then pump the next queued request if any.
    function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }
};
|
30 | Object.defineProperty(exports, "__esModule", { value: true });
|
31 | const tree_path_1 = require("@atomist/tree-path");
|
32 | const _ = require("lodash");
|
33 | const logger_1 = require("../../util/logger");
|
34 | const HasCache_1 = require("../../project/HasCache");
|
35 | const projectUtils_1 = require("../../project/util/projectUtils");
|
36 | const sourceLocationUtils_1 = require("../../project/util/sourceLocationUtils");
|
37 | const FileHits_1 = require("./FileHits");
|
38 | const FileParser_1 = require("./FileParser");
|
39 |
|
40 |
|
41 |
|
42 |
|
43 |
|
44 |
|
45 |
|
46 |
|
47 |
|
48 |
|
49 |
|
/**
 * Find all matches of the given path expression across files matching the
 * glob patterns. Convenience wrapper around fileMatches that discards the
 * per-file grouping and returns a single flat array of matches.
 */
async function findMatches(p, parseWith, globPatterns, pathExpression, functionRegistry) {
    const hits = await fileMatches(p, { parseWith, globPatterns, pathExpression, functionRegistry });
    return _.flatten(hits.map(hit => hit.matches));
}
exports.findMatches = findMatches;
|
57 |
|
58 |
|
59 |
|
60 |
|
61 |
|
62 |
|
63 |
|
/**
 * Return all matches for the given path expression query options,
 * flattened across every matched file.
 */
async function matches(p, peqo) {
    const hits = await fileMatches(p, peqo);
    return _.flatten(hits.map(hit => hit.matches));
}
exports.matches = matches;
|
71 |
|
72 |
|
73 |
|
74 |
|
75 |
|
76 |
|
77 |
|
/**
 * Lazily iterate over all matches, file by file. After the matches of each
 * file have been yielded, the project is flushed so that any updates made to
 * the yielded nodes are persisted before moving on to the next file.
 */
async function* matchIterator(p, opts) {
    for await (const fileHit of fileHitIterator(p, opts)) {
        try {
            for (const match of fileHit.matches) {
                yield match;
            }
        }
        finally {
            // Persist any updates the consumer made while holding the matches.
            await p.flush();
        }
    }
}
exports.matchIterator = matchIterator;
|
106 |
|
107 |
|
108 |
|
109 |
|
110 |
|
111 |
|
112 |
|
113 |
|
114 |
|
115 |
|
116 |
|
117 |
|
118 |
|
119 |
|
/**
 * Map every match of the path expression through the given mapper and gather
 * the non-falsy results. Positional-argument convenience form of gather.
 */
async function gatherFromMatches(p, parserOrRegistry, globPatterns, pathExpression, mapper, functionRegistry) {
    return gather(p, { parseWith: parserOrRegistry, globPatterns, pathExpression, mapper, functionRegistry });
}
exports.gatherFromMatches = gatherFromMatches;
|
126 |
|
127 |
|
128 |
|
129 |
|
130 |
|
131 |
|
132 |
|
133 |
|
134 |
|
/**
 * Run the query described by peqo, map each match through peqo.mapper and
 * return the non-falsy mapped values, flattened across all files.
 */
async function gather(p, peqo) {
    const hits = await fileMatches(p, peqo);
    return _.flatten(hits.map(hit => hit.matches.map(peqo.mapper).filter(mapped => !!mapped)));
}
exports.gather = gather;
|
142 |
|
143 |
|
144 |
|
/**
 * Like gather, but pair each mapped value with the file it came from and the
 * match result that produced it.
 *
 * Bug fix: the previous implementation filtered the mapped values *before*
 * indexing back into fileHit.matches, so whenever the mapper returned a falsy
 * value for some match, every later value was paired with the wrong
 * matchResult. We now map first (preserving the mapper's (value, index, array)
 * call signature) and filter afterwards, so value and matchResult stay aligned.
 */
async function gatherWithLocation(p, peqo) {
    const fileHits = await fileMatches(p, peqo);
    const result = [];
    for (const fileHit of fileHits) {
        const values = fileHit.matches.map(peqo.mapper);
        values.forEach((value, index) => {
            if (!!value) {
                result.push({ value, file: fileHit.file, matchResult: fileHit.matches[index] });
            }
        });
    }
    return result;
}
exports.gatherWithLocation = gatherWithLocation;
|
157 |
|
158 |
|
159 |
|
160 |
|
161 |
|
162 |
|
163 |
|
164 |
|
165 |
|
166 |
|
167 |
|
/**
 * Find all files with at least one match for the path expression, returning
 * the per-file hits. Positional-argument convenience form of fileMatches.
 */
async function findFileMatches(p, parseWith, globPatterns, pathExpression, functionRegistry) {
    return fileMatches(p, { parseWith, globPatterns, pathExpression, functionRegistry });
}
exports.findFileMatches = findFileMatches;
|
174 |
|
175 |
|
176 |
|
177 |
|
178 |
|
179 |
|
180 |
|
/**
 * Execute the path expression query against every file matching the glob
 * patterns and return one FileHit per file that produced matches.
 * Throws if no parser can be resolved for the expression.
 */
async function fileMatches(p, peqo) {
    const parsed = tree_path_1.toPathExpression(peqo.pathExpression);
    const parser = findParser(parsed, peqo.parseWith);
    if (!parser) {
        throw new Error(`Cannot find parser for path expression [${peqo.pathExpression}]: Using ${peqo.parseWith}`);
    }
    // Literal values demanded by the expression let parseFile skip files cheaply.
    const valuesToCheckFor = literalValues(parsed);
    const hits = await projectUtils_1.gatherFromFiles(p, peqo.globPatterns, file => parseFile(parser, parsed, peqo.functionRegistry, p, file, valuesToCheckFor, undefined, peqo.cacheAst !== false));
    const resolved = await Promise.all(hits);
    // parseFile yields undefined for files without matches; drop those.
    return resolved.filter(hit => !!hit);
}
exports.fileMatches = fileMatches;
|
195 |
|
196 |
|
197 |
|
198 |
|
199 |
|
/**
 * Lazily iterate over the files matching the glob patterns, yielding a
 * FileHit for each file in which the path expression matches. Throws if no
 * parser can be resolved for the expression.
 */
async function* fileHitIterator(p, opts) {
    const parsed = tree_path_1.toPathExpression(opts.pathExpression);
    const parser = findParser(parsed, opts.parseWith);
    if (!parser) {
        throw new Error(`Cannot find parser for path expression [${opts.pathExpression}]: Using ${opts.parseWith}`);
    }
    // Literal values demanded by the expression let parseFile skip files cheaply.
    const valuesToCheckFor = literalValues(parsed);
    for await (const file of projectUtils_1.fileIterator(p, opts.globPatterns, opts.fileFilter)) {
        const fileHit = await parseFile(parser, parsed, opts.functionRegistry, p, file, valuesToCheckFor, opts.testWith, opts.cacheAst !== false);
        if (!!fileHit) {
            yield fileHit;
        }
    }
}
exports.fileHitIterator = fileHitIterator;
|
/**
 * Parse a single file and evaluate the path expression against its AST.
 *
 * Returns a FileHit when the expression matches, or undefined when the file
 * can be skipped, the expression does not match, or parsing fails.
 *
 * @param parser            FileParser used to build the AST
 * @param pex               parsed path expression to evaluate
 * @param functionRegistry  custom functions available to the expression
 * @param p                 owning project (threaded into the FileHit)
 * @param file              file to parse
 * @param valuesToCheckFor  literal values the expression requires; if the raw
 *                          content lacks any of them, parsing is skipped
 * @param matchTester       optional per-file predicate factory filtering matches
 * @param cacheAst          whether to cache the AST on the file
 */
async function parseFile(parser, pex, functionRegistry, p, file, valuesToCheckFor, matchTester, cacheAst) {
    // Cheap pre-check: if the expression demands literals that the raw file
    // content does not contain, the expression cannot match — skip parsing.
    if (valuesToCheckFor.length > 0) {
        const content = await file.getContent();
        if (valuesToCheckFor.some(literal => !content.includes(literal))) {
            return undefined;
        }
    }
    // Let the parser veto the file before the (potentially expensive) parse.
    if (!!parser.couldBeMatchesInThisFile && !(await parser.couldBeMatchesInThisFile(pex, file))) {
        return undefined;
    }
    try {
        // Parse the file to an AST, or reuse a previously cached one.
        const topLevelProduction = await HasCache_1.retrieveOrCompute(file, `ast_${parser.rootName}`, async f => {
            const prod = await parser.toAst(f);
            tree_path_1.defineDynamicProperties(prod);
            return prod;
        }, cacheAst);
        logger_1.logger.debug("Successfully parsed file '%s' to AST with root node named '%s'. Will execute '%s'", file.path, topLevelProduction.$name, tree_path_1.stringify(pex));
        // Synthetic file-level root wrapping the parser's top-level production.
        const fileNode = {
            path: file.path,
            name: file.name,
            $name: file.name,
            $children: [topLevelProduction],
        };
        const r = tree_path_1.evaluateExpression(fileNode, pex, functionRegistry);
        if (tree_path_1.isSuccessResult(r)) {
            logger_1.logger.debug("%d matches in file '%s'", r.length, file.path);
            // Returned without `await` on purpose: failures in source-location
            // fixup or in the matchTester propagate to the caller instead of
            // being swallowed by the catch below (matches the original flow).
            return fillInSourceLocations(file, r)
                .then(locatedNodes => {
                    if (matchTester) {
                        return matchTester(file)
                            .then(test => new FileHits_1.FileHit(p, file, fileNode, locatedNodes.filter(test)));
                    }
                    return new FileHits_1.FileHit(p, file, fileNode, locatedNodes);
                });
        }
        else {
            logger_1.logger.debug("No matches in file '%s'", file.path);
            return undefined;
        }
    }
    catch (err) {
        // Parse failures are expected for files that don't fit the grammar;
        // treat them as "no match" rather than failing the whole query.
        logger_1.logger.debug("Failed to parse file '%s': %s", file.path, err);
        return undefined;
    }
}
|
279 |
|
280 |
|
281 |
|
282 |
|
283 |
|
284 |
|
/**
 * Attach a sourceLocation to each located node, computed from its $offset
 * within the file's content. Resolves to the same (mutated) nodes array.
 */
async function fillInSourceLocations(f, nodes) {
    if (nodes.length === 0) {
        // Nothing to annotate — avoid reading the file content at all.
        return nodes;
    }
    const content = await f.getContent();
    for (const node of nodes) {
        node.sourceLocation = sourceLocationUtils_1.toSourceLocation(f, content, node.$offset);
    }
    return nodes;
}
|
299 |
|
300 |
|
301 |
|
302 |
|
303 |
|
304 |
|
305 |
|
306 |
|
307 |
|
308 |
|
/**
 * Find the $value of every node matching the path expression across all
 * files matching the glob patterns.
 */
async function findValues(p, parserOrRegistry, globPatterns, pathExpression, functionRegistry) {
    const fileHits = await fileMatches(p, { parseWith: parserOrRegistry, globPatterns, pathExpression, functionRegistry });
    return _.flatten(fileHits.map(fh => fh.matches)).map(m => m.$value);
}
exports.findValues = findValues;
|
315 |
|
316 |
|
317 |
|
318 |
|
319 |
|
320 |
|
321 |
|
322 |
|
323 |
|
324 |
|
/**
 * Remove every node matching the path expression from the matched files,
 * then flush the project. Thin wrapper over doWithAllMatches that zaps each
 * match with the given options.
 */
function zapAllMatches(p, parserOrRegistry, globPatterns, pathExpression, opts = {}) {
    return doWithAllMatches(p, parserOrRegistry, globPatterns, pathExpression, match => match.zap(opts));
}
exports.zapAllMatches = zapAllMatches;
|
329 |
|
330 |
|
331 |
|
332 |
|
333 |
|
334 |
|
335 |
|
336 |
|
337 |
|
338 |
|
/**
 * Apply the given action to every match of the path expression in every
 * matched file, then flush the project to persist the changes.
 */
async function doWithAllMatches(p, parserOrRegistry, globPatterns, pathExpression, action) {
    const fileHits = await fileMatches(p, { parseWith: parserOrRegistry, globPatterns, pathExpression });
    for (const fileHit of fileHits) {
        applyActionToMatches(fileHit, action);
    }
    return p.flush();
}
exports.doWithAllMatches = doWithAllMatches;
|
/**
 * Apply action to each match of the file hit, in ascending $offset order.
 * Note that Array#sort reorders fh.matches in place.
 */
function applyActionToMatches(fh, action) {
    const ordered = fh.matches.sort((a, b) => a.$offset - b.$offset);
    ordered.forEach(action);
}
|
/**
 * Resolve the parser to use for the given parsed path expression.
 * A concrete FileParser may first validate the expression; a parser
 * registry is asked to supply a parser suited to the expression.
 */
function findParser(pathExpression, fp) {
    if (FileParser_1.isFileParser(fp)) {
        if (!!fp.validate) {
            fp.validate(pathExpression);
        }
        return fp;
    }
    return fp.parserFor(pathExpression);
}
exports.findParser = findParser;
|
364 |
|
365 |
|
366 |
|
367 |
|
368 |
|
369 |
|
370 |
|
/**
 * Collect the distinct literal values demanded by attribute-equality
 * predicates anywhere in the path expression. Used to cheaply pre-filter
 * files whose content cannot possibly match.
 */
function literalValues(pex) {
    const values = allPredicates(pex)
        .filter(isAttributeEqualityPredicate)
        .map(pred => pred.value);
    return _.uniq(values);
}
exports.literalValues = literalValues;
|
/**
 * Recursively gather every predicate from the path expression, descending
 * into union branches and nested predicate sub-expressions.
 */
function allPredicates(pe) {
    if (tree_path_1.isUnionPathExpression(pe)) {
        return _.flatten(pe.unions.map(allPredicates));
    }
    const perStep = pe.locationSteps.map(step =>
        _.flatten(step.predicates.map(pred =>
            isNestedPredicate(pred) ? allPredicates(pred.pathExpression) : [pred])));
    return _.flatten(perStep);
}
|
/**
 * Duck-typed guard: a predicate carrying a truthy `value` property is
 * treated as an attribute-equality predicate.
 */
function isAttributeEqualityPredicate(p) {
    return Boolean(p.value);
}
|
/**
 * Duck-typed guard: a predicate carrying a truthy `pathExpression` property
 * is treated as a nested (sub-expression) predicate.
 */
function isNestedPredicate(p) {
    return Boolean(p.pathExpression);
}
|
400 |
|
\ | No newline at end of file |