1 | import {ContextualKeyword} from "../parser/tokenizer/keywords";
|
2 |
|
3 |
|
4 |
|
5 |
|
6 |
|
7 |
|
8 |
|
9 |
|
10 |
|
11 |
|
12 |
|
/**
 * Scan forward from the current token to decide whether the optional-chain /
 * nullish-coalesce operation starting here contains a bare `await` at the same
 * scope depth, in which case the transformed helper call must itself be awaited.
 *
 * @param {TokenProcessor} tokens - token stream positioned at the start of the operation.
 * @returns {boolean} true if an `await` contextual keyword (used as a keyword,
 *   not as an identifier) occurs within the operation at the starting scope depth.
 */
export default function isAsyncOperation(tokens) {
  const firstToken = tokens.currentToken();
  // Tracks how deep we are in nested ?. chains and ?? expressions; we stop
  // scanning once the operation that began at the current token is closed.
  let nestingLevel = 0;
  for (let i = tokens.currentIndex(); ; i++) {
    const token = tokens.tokens[i];
    if (token.isOptionalChainStart) {
      nestingLevel += 1;
    }
    if (token.isOptionalChainEnd) {
      nestingLevel -= 1;
    }
    nestingLevel += token.numNullishCoalesceStarts;
    nestingLevel -= token.numNullishCoalesceEnds;

    // `identifierRole == null` (loose) matches both null and undefined:
    // an await with no identifier role is a genuine keyword use, not a
    // variable named "await".
    const isBareAwaitKeyword =
      token.contextualKeyword === ContextualKeyword._await &&
      token.identifierRole == null &&
      token.scopeDepth === firstToken.scopeDepth;
    if (isBareAwaitKeyword) {
      return true;
    }

    // Emulates the original do-while: the first token is always examined,
    // then we continue only while still inside the operation and in bounds.
    if (nestingLevel <= 0 || i + 1 >= tokens.tokens.length) {
      break;
    }
  }
  return false;
}
|