babel / babel
Showing 15 of 48 files from the diff.
Other files ignored by Codecov
babel.config.js has changed.

@@ -10,7 +10,7 @@
Loading
10 10
} from "./plugin-utils";
11 11
import Parser from "./parser";
12 12
13 -
import { types as tokTypes } from "./tokenizer/types";
13 +
import { getExportedToken, tt as internalTokenTypes } from "./tokenizer/types";
14 14
import "./tokenizer/context";
15 15
16 16
import type { Expression, File } from "./types";
@@ -67,7 +67,15 @@
Loading
67 67
  return parser.getExpression();
68 68
}
69 69
70 -
export { tokTypes };
70 +
function generateExportedTokenTypes(internalTokenTypes) {
71 +
  const tokenTypes = {};
72 +
  for (const typeName of Object.keys(internalTokenTypes)) {
73 +
    tokenTypes[typeName] = getExportedToken(internalTokenTypes[typeName]);
74 +
  }
75 +
  return tokenTypes;
76 +
}
77 +
78 +
export const tokTypes = generateExportedTokenTypes(internalTokenTypes);
71 79
72 80
function getParser(options: ?Options, input: string): Parser {
73 81
  let cls = Parser;

@@ -6,7 +6,7 @@
Loading
6 6
import { Position } from "../util/location";
7 7
8 8
import { types as ct, type TokContext } from "./context";
9 -
import { types as tt, type TokenType } from "./types";
9 +
import { tt, type TokenType } from "./types";
10 10
import type { ParsingError, ErrorTemplate } from "../parser/error";
11 11
12 12
type TopicContextState = {

@@ -2,7 +2,7 @@
Loading
2 2
3 3
/*:: declare var invariant; */
4 4
import * as charCodes from "charcodes";
5 -
import { types as tt, type TokenType } from "../tokenizer/types";
5 +
import { tt, type TokenType } from "../tokenizer/types";
6 6
import type {
7 7
  TSParameterProperty,
8 8
  Decorator,

@@ -1,7 +1,12 @@
Loading
1 1
// @flow
2 2
3 3
import * as N from "../types";
4 -
import { types as tt, type TokenType } from "../tokenizer/types";
4 +
import {
5 +
  tokenIsLoop,
6 +
  tt,
7 +
  type TokenType,
8 +
  getExportedToken,
9 +
} from "../tokenizer/types";
5 10
import ExpressionParser from "./expression";
6 11
import { Errors, SourceTypeModuleErrors } from "./error";
7 12
import { isIdentifierChar, isIdentifierStart } from "../util/identifier";
@@ -56,10 +61,11 @@
Loading
56 61
 * @returns
57 62
 */
58 63
function babel7CompatTokens(tokens) {
59 -
  if (!process.env.BABEL_8_BREAKING) {
60 -
    for (let i = 0; i < tokens.length; i++) {
61 -
      const token = tokens[i];
62 -
      if (token.type === tt.privateName) {
64 +
  for (let i = 0; i < tokens.length; i++) {
65 +
    const token = tokens[i];
66 +
    const { type } = token;
67 +
    if (type === tt.privateName) {
68 +
      if (!process.env.BABEL_8_BREAKING) {
63 69
        const { loc, start, value, end } = token;
64 70
        const hashEndPos = start + 1;
65 71
        const hashEndLoc = new Position(loc.start.line, loc.start.column + 1);
@@ -68,7 +74,7 @@
Loading
68 74
          1,
69 75
          // $FlowIgnore: hacky way to create token
70 76
          new Token({
71 -
            type: tt.hash,
77 +
            type: getExportedToken(tt.hash),
72 78
            value: "#",
73 79
            start: start,
74 80
            end: hashEndPos,
@@ -77,7 +83,7 @@
Loading
77 83
          }),
78 84
          // $FlowIgnore: hacky way to create token
79 85
          new Token({
80 -
            type: tt.name,
86 +
            type: getExportedToken(tt.name),
81 87
            value: value,
82 88
            start: hashEndPos,
83 89
            end: end,
@@ -85,8 +91,14 @@
Loading
85 91
            endLoc: loc.end,
86 92
          }),
87 93
        );
94 +
        i++;
95 +
        continue;
88 96
      }
89 97
    }
98 +
    if (typeof type === "number") {
99 +
      // $FlowIgnore: we manipulate `token` for performance reasons
100 +
      token.type = getExportedToken(type);
101 +
    }
90 102
  }
91 103
  return tokens;
92 104
}
@@ -246,9 +258,9 @@
Loading
246 258
247 259
    switch (starttype) {
248 260
      case tt._break:
261 +
        return this.parseBreakContinueStatement(node, /* isBreak */ true);
249 262
      case tt._continue:
250 -
        // $FlowFixMe
251 -
        return this.parseBreakContinueStatement(node, starttype.keyword);
263 +
        return this.parseBreakContinueStatement(node, /* isBreak */ false);
252 264
      case tt._debugger:
253 265
        return this.parseDebuggerStatement(node);
254 266
      case tt._do:
@@ -472,9 +484,8 @@
Loading
472 484
473 485
  parseBreakContinueStatement(
474 486
    node: N.BreakStatement | N.ContinueStatement,
475 -
    keyword: string,
487 +
    isBreak: boolean,
476 488
  ): N.BreakStatement | N.ContinueStatement {
477 -
    const isBreak = keyword === "break";
478 489
    this.next();
479 490
480 491
    if (this.isLineTerminator()) {
@@ -484,7 +495,7 @@
Loading
484 495
      this.semicolon();
485 496
    }
486 497
487 -
    this.verifyBreakContinue(node, keyword);
498 +
    this.verifyBreakContinue(node, isBreak);
488 499
489 500
    return this.finishNode(
490 501
      node,
@@ -494,9 +505,8 @@
Loading
494 505
495 506
  verifyBreakContinue(
496 507
    node: N.BreakStatement | N.ContinueStatement,
497 -
    keyword: string,
508 +
    isBreak: boolean,
498 509
  ) {
499 -
    const isBreak = keyword === "break";
500 510
    let i;
501 511
    for (i = 0; i < this.state.labels.length; ++i) {
502 512
      const lab = this.state.labels[i];
@@ -506,7 +516,11 @@
Loading
506 516
      }
507 517
    }
508 518
    if (i === this.state.labels.length) {
509 -
      this.raise(node.start, Errors.IllegalBreakContinue, keyword);
519 +
      this.raise(
520 +
        node.start,
521 +
        Errors.IllegalBreakContinue,
522 +
        isBreak ? "break" : "continue",
523 +
      );
510 524
    }
511 525
  }
512 526
@@ -850,7 +864,7 @@
Loading
850 864
      }
851 865
    }
852 866
853 -
    const kind = this.state.type.isLoop
867 +
    const kind = tokenIsLoop(this.state.type)
854 868
      ? "loop"
855 869
      : this.match(tt._switch)
856 870
      ? "switch"
@@ -1994,7 +2008,8 @@
Loading
1994 2008
  }
1995 2009
1996 2010
  shouldParseExportDeclaration(): boolean {
1997 -
    if (this.match(tt.at)) {
2011 +
    const { type } = this.state;
2012 +
    if (type === tt.at) {
1998 2013
      this.expectOnePlugin(["decorators", "decorators-legacy"]);
1999 2014
      if (this.hasPlugin("decorators")) {
2000 2015
        if (this.getPluginOption("decorators", "decoratorsBeforeExport")) {
@@ -2006,10 +2021,10 @@
Loading
2006 2021
    }
2007 2022
2008 2023
    return (
2009 -
      this.state.type.keyword === "var" ||
2010 -
      this.state.type.keyword === "const" ||
2011 -
      this.state.type.keyword === "function" ||
2012 -
      this.state.type.keyword === "class" ||
2024 +
      type === tt._var ||
2025 +
      type === tt._const ||
2026 +
      type === tt._function ||
2027 +
      type === tt._class ||
2013 2028
      this.isLet() ||
2014 2029
      this.isAsyncFunction()
2015 2030
    );

@@ -1,5 +1,5 @@
Loading
1 1
import type Parser from "../parser";
2 -
import { types as tt } from "../tokenizer/types";
2 +
import { tt } from "../tokenizer/types";
3 3
import * as N from "../types";
4 4
5 5
export default (superClass: Class<Parser>): Class<Parser> =>

@@ -18,7 +18,19 @@
Loading
18 18
//
19 19
// [opp]: http://en.wikipedia.org/wiki/Operator-precedence_parser
20 20
21 -
import { types as tt, type TokenType } from "../tokenizer/types";
21 +
import {
22 +
  tokenCanStartExpression,
23 +
  tokenIsAssignment,
24 +
  tokenIsKeyword,
25 +
  tokenIsOperator,
26 +
  tokenIsPostfix,
27 +
  tokenIsPrefix,
28 +
  tokenIsRightAssociative,
29 +
  tokenLabelName,
30 +
  tokenOperatorPrecedence,
31 +
  tt,
32 +
  type TokenType,
33 +
} from "../tokenizer/types";
22 34
import * as N from "../types";
23 35
import LValParser from "./lval";
24 36
import {
@@ -287,7 +299,7 @@
Loading
287 299
    if (afterLeftParse) {
288 300
      left = afterLeftParse.call(this, left, startPos, startLoc);
289 301
    }
290 -
    if (this.state.type.isAssign) {
302 +
    if (tokenIsAssignment(this.state.type)) {
291 303
      const node = this.startNodeAt(startPos, startLoc);
292 304
      const operator = this.state.value;
293 305
      node.operator = operator;
@@ -394,8 +406,7 @@
Loading
394 406
      const { start } = left;
395 407
396 408
      if (
397 -
        // TODO: When migrating to TS, use tt._in.binop!
398 -
        minPrec >= ((tt._in.binop: any): number) ||
409 +
        minPrec >= tokenOperatorPrecedence(tt._in) ||
399 410
        !this.prodParam.hasIn ||
400 411
        !this.match(tt._in)
401 412
      ) {
@@ -405,10 +416,10 @@
Loading
405 416
      this.classScope.usePrivateName(value, start);
406 417
    }
407 418
408 -
    let prec = this.state.type.binop;
409 -
    if (prec != null && (this.prodParam.hasIn || !this.match(tt._in))) {
419 +
    const op = this.state.type;
420 +
    if (tokenIsOperator(op) && (this.prodParam.hasIn || !this.match(tt._in))) {
421 +
      let prec = tokenOperatorPrecedence(op);
410 422
      if (prec > minPrec) {
411 -
        const op = this.state.type;
412 423
        if (op === tt.pipeline) {
413 424
          this.expectPlugin("pipelineOperator");
414 425
          if (this.state.inFSharpPipelineDirectBody) {
@@ -426,7 +437,7 @@
Loading
426 437
        if (coalesce) {
427 438
          // Handle the precedence of `tt.coalesce` as equal to the range of logical expressions.
428 439
          // In other words, `node.right` shouldn't contain logical expressions in order to check the mixed error.
429 -
          prec = ((tt.logicalAND: any): { binop: number }).binop;
440 +
          prec = tokenOperatorPrecedence(tt.logicalAND);
430 441
        }
431 442
432 443
        this.next();
@@ -524,7 +535,7 @@
Loading
524 535
      this.parseMaybeUnaryOrPrivate(),
525 536
      startPos,
526 537
      startLoc,
527 -
      op.rightAssociative ? prec - 1 : prec,
538 +
      tokenIsRightAssociative(op) ? prec - 1 : prec,
528 539
    );
529 540
  }
530 541
@@ -576,7 +587,7 @@
Loading
576 587
    }
577 588
    const update = this.match(tt.incDec);
578 589
    const node = this.startNode();
579 -
    if (this.state.type.prefix) {
590 +
    if (tokenIsPrefix(this.state.type)) {
580 591
      node.operator = this.state.value;
581 592
      node.prefix = true;
582 593
@@ -609,9 +620,10 @@
Loading
609 620
    const expr = this.parseUpdate(node, update, refExpressionErrors);
610 621
611 622
    if (isAwait) {
623 +
      const { type } = this.state;
612 624
      const startsExpr = this.hasPlugin("v8intrinsic")
613 -
        ? this.state.type.startsExpr
614 -
        : this.state.type.startsExpr && !this.match(tt.modulo);
625 +
        ? tokenCanStartExpression(type)
626 +
        : tokenCanStartExpression(type) && !this.match(tt.modulo);
615 627
      if (startsExpr && !this.isAmbiguousAwait()) {
616 628
        this.raiseOverwrite(startPos, Errors.AwaitNotInAsyncContext);
617 629
        return this.parseAwait(startPos, startLoc);
@@ -636,7 +648,7 @@
Loading
636 648
    const startLoc = this.state.startLoc;
637 649
    let expr = this.parseExprSubscripts(refExpressionErrors);
638 650
    if (this.checkExpressionErrors(refExpressionErrors, false)) return expr;
639 -
    while (this.state.type.postfix && !this.canInsertSemicolon()) {
651 +
    while (tokenIsPostfix(this.state.type) && !this.canInsertSemicolon()) {
640 652
      const node = this.startNodeAt(startPos, startLoc);
641 653
      node.operator = this.state.value;
642 654
      node.prefix = false;
@@ -1356,7 +1368,7 @@
Loading
1356 1368
      throw this.raise(
1357 1369
        start,
1358 1370
        Errors.PipeTopicUnconfiguredToken,
1359 -
        tokenType.label,
1371 +
        tokenLabelName(tokenType),
1360 1372
      );
1361 1373
    }
1362 1374
  }
@@ -1381,7 +1393,7 @@
Loading
1381 1393
          "pipelineOperator",
1382 1394
          "topicToken",
1383 1395
        );
1384 -
        return tokenType.label === pluginTopicToken;
1396 +
        return tokenLabelName(tokenType) === pluginTopicToken;
1385 1397
      }
1386 1398
      case "smart":
1387 1399
        return tokenType === tt.hash;
@@ -2527,8 +2539,8 @@
Loading
2527 2539
2528 2540
    if (type === tt.name) {
2529 2541
      name = this.state.value;
2530 -
    } else if (type.keyword) {
2531 -
      name = type.keyword;
2542 +
    } else if (tokenIsKeyword(type)) {
2543 +
      name = tokenLabelName(type);
2532 2544
    } else {
2533 2545
      throw this.unexpected();
2534 2546
    }
@@ -2538,7 +2550,7 @@
Loading
2538 2550
      // This will prevent this.next() from throwing about unexpected escapes.
2539 2551
      this.state.type = tt.name;
2540 2552
    } else {
2541 -
      this.checkReservedWord(name, start, !!type.keyword, false);
2553 +
      this.checkReservedWord(name, start, tokenIsKeyword(type), false);
2542 2554
    }
2543 2555
2544 2556
    this.next();

@@ -6,7 +6,12 @@
Loading
6 6
/* eslint-disable @babel/development-internal/dry-error-messages */
7 7
8 8
import type Parser from "../../parser";
9 -
import { types as tt, type TokenType } from "../../tokenizer/types";
9 +
import {
10 +
  tokenIsKeyword,
11 +
  tokenLabelName,
12 +
  tt,
13 +
  type TokenType,
14 +
} from "../../tokenizer/types";
10 15
import * as N from "../../types";
11 16
import type { Position } from "../../util/location";
12 17
import { types as tc } from "../../tokenizer/context";
@@ -157,7 +162,8 @@
Loading
157 162
158 163
function isMaybeDefaultImport(state: { type: TokenType, value: any }): boolean {
159 164
  return (
160 -
    (state.type === tt.name || !!state.type.keyword) && state.value !== "from"
165 +
    (state.type === tt.name || tokenIsKeyword(state.type)) &&
166 +
    state.value !== "from"
161 167
  );
162 168
}
163 169
@@ -1605,11 +1611,12 @@
Loading
1605 1611
          this.next();
1606 1612
          return this.finishNode(node, "ExistsTypeAnnotation");
1607 1613
1614 +
        case tt._typeof:
1615 +
          return this.flowParseTypeofType();
1616 +
1608 1617
        default:
1609 -
          if (this.state.type.keyword === "typeof") {
1610 -
            return this.flowParseTypeofType();
1611 -
          } else if (this.state.type.keyword) {
1612 -
            const label = this.state.type.label;
1618 +
          if (tokenIsKeyword(this.state.type)) {
1619 +
            const label = tokenLabelName(this.state.type);
1613 1620
            this.next();
1614 1621
            return super.createIdentifier(node, label);
1615 1622
          }
@@ -2650,7 +2657,7 @@
Loading
2650 2657
        if (
2651 2658
          specifierTypeKind !== null &&
2652 2659
          !this.match(tt.name) &&
2653 -
          !this.state.type.keyword
2660 +
          !tokenIsKeyword(this.state.type)
2654 2661
        ) {
2655 2662
          // `import {type as ,` or `import {type as }`
2656 2663
          specifier.imported = as_ident;
@@ -2665,7 +2672,7 @@
Loading
2665 2672
      } else {
2666 2673
        if (
2667 2674
          specifierTypeKind !== null &&
2668 -
          (this.match(tt.name) || this.state.type.keyword)
2675 +
          (this.match(tt.name) || tokenIsKeyword(this.state.type))
2669 2676
        ) {
2670 2677
          // `import {type foo`
2671 2678
          specifier.imported = this.parseIdentifier(true);

@@ -7,7 +7,7 @@
Loading
7 7
8 8
import type { TokenType } from "../../tokenizer/types";
9 9
import type State from "../../tokenizer/state";
10 -
import { types as tt } from "../../tokenizer/types";
10 +
import { tokenOperatorPrecedence, tt } from "../../tokenizer/types";
11 11
import { types as ct } from "../../tokenizer/context";
12 12
import * as N from "../../types";
13 13
import type { Position } from "../../util/location";
@@ -2195,7 +2195,7 @@
Loading
2195 2195
      minPrec: number,
2196 2196
    ) {
2197 2197
      if (
2198 -
        nonNull(tt._in.binop) > minPrec &&
2198 +
        tokenOperatorPrecedence(tt._in) > minPrec &&
2199 2199
        !this.hasPrecedingLineBreak() &&
2200 2200
        this.isContextual("as")
2201 2201
      ) {

@@ -3,8 +3,6 @@
Loading
3 3
// The token context is used to track whether the apostrophe "`"
4 4
// starts or ends a string template
5 5
6 -
import { types as tt } from "./types";
7 -
8 6
export class TokContext {
9 7
  constructor(token: string, preserveSpace?: boolean) {
10 8
    this.token = token;
@@ -21,39 +19,3 @@
Loading
21 19
  brace: new TokContext("{"),
22 20
  template: new TokContext("`", true),
23 21
};
24 -
25 -
// Token-specific context update code
26 -
// Note that we should avoid accessing `this.prodParam` in context update,
27 -
// because it is executed immediately when last token is consumed, which may be
28 -
// before `this.prodParam` is updated. e.g.
29 -
// ```
30 -
// function *g() { () => yield / 2 }
31 -
// ```
32 -
// When `=>` is eaten, the context update of `yield` is executed, however,
33 -
// `this.prodParam` still has `[Yield]` production because it is not yet updated
34 -
35 -
tt.braceR.updateContext = context => {
36 -
  context.pop();
37 -
};
38 -
39 -
// we don't need to update context for tt.braceBarL because we do not pop context for tt.braceBarR
40 -
// ideally only dollarBraceL "${" needs a non-template context
41 -
// in order to indicate that the last "`" in `${`" starts a new string template
42 -
// inside a template element within outer string template.
43 -
// but when we popped such context in `}`, we lost track of whether this
44 -
// `}` matches a `${` or other tokens matching `}`, so we have to push
45 -
// such context in every token that `}` will match.
46 -
tt.braceL.updateContext =
47 -
  tt.braceHashL.updateContext =
48 -
  tt.dollarBraceL.updateContext =
49 -
    context => {
50 -
      context.push(types.brace);
51 -
    };
52 -
53 -
tt.backQuote.updateContext = context => {
54 -
  if (context[context.length - 1] === types.template) {
55 -
    context.pop();
56 -
  } else {
57 -
    context.push(types.template);
58 -
  }
59 -
};

@@ -1,5 +1,5 @@
Loading
1 1
// @flow
2 -
import type { TokContext } from "./context";
2 +
import { types as tc, type TokContext } from "./context";
3 3
// ## Token types
4 4
5 5
// The assignment of fine-grained, information-carrying type objects
@@ -41,7 +41,13 @@
Loading
41 41
  binop?: ?number,
42 42
};
43 43
44 -
export class TokenType {
44 +
// Internally the tokenizer stores token as a number
45 +
export opaque type TokenType = number;
46 +
47 +
// The `ExportedTokenType` is exported via `tokTypes` and accessible
48 +
// when `tokens: true` is enabled. Unlike internal token type, it provides
49 +
// metadata of the tokens.
50 +
export class ExportedTokenType {
45 51
  label: string;
46 52
  keyword: ?string;
47 53
  beforeExpr: boolean;
@@ -52,7 +58,8 @@
Loading
52 58
  prefix: boolean;
53 59
  postfix: boolean;
54 60
  binop: ?number;
55 -
  updateContext: ?(context: Array<TokContext>) => void;
61 +
  // todo(Babel 8): remove updateContext from exposed token layout
62 +
  declare updateContext: ?(context: Array<TokContext>) => void;
56 63
57 64
  constructor(label: string, conf: TokenOptions = {}) {
58 65
    this.label = label;
@@ -65,7 +72,9 @@
Loading
65 72
    this.prefix = !!conf.prefix;
66 73
    this.postfix = !!conf.postfix;
67 74
    this.binop = conf.binop != null ? conf.binop : null;
68 -
    this.updateContext = null;
75 +
    if (!process.env.BABEL_8_BREAKING) {
76 +
      this.updateContext = null;
77 +
    }
69 78
  }
70 79
}
71 80
@@ -73,55 +82,78 @@
Loading
73 82
74 83
function createKeyword(name: string, options: TokenOptions = {}): TokenType {
75 84
  options.keyword = name;
76 -
  const token = new TokenType(name, options);
85 +
  const token = createToken(name, options);
77 86
  keywords.set(name, token);
78 87
  return token;
79 88
}
80 89
81 90
function createBinop(name: string, binop: number) {
82 -
  return new TokenType(name, { beforeExpr, binop });
91 +
  return createToken(name, { beforeExpr, binop });
83 92
}
84 93
85 -
export const types: { [name: string]: TokenType } = {
86 -
  num: new TokenType("num", { startsExpr }),
87 -
  bigint: new TokenType("bigint", { startsExpr }),
88 -
  decimal: new TokenType("decimal", { startsExpr }),
89 -
  regexp: new TokenType("regexp", { startsExpr }),
90 -
  string: new TokenType("string", { startsExpr }),
91 -
  name: new TokenType("name", { startsExpr }),
92 -
  privateName: new TokenType("#name", { startsExpr }),
93 -
  eof: new TokenType("eof"),
94 +
let tokenTypeCounter = -1;
95 +
export const tokenTypes: ExportedTokenType[] = [];
96 +
const tokenLabels: string[] = [];
97 +
const tokenBinops: number[] = [];
98 +
const tokenBeforeExprs: boolean[] = [];
99 +
const tokenStartsExprs: boolean[] = [];
100 +
const tokenPrefixes: boolean[] = [];
101 +
102 +
function createToken(name: string, options: TokenOptions = {}): TokenType {
103 +
  ++tokenTypeCounter;
104 +
  tokenLabels.push(name);
105 +
  tokenBinops.push(options.binop ?? -1);
106 +
  tokenBeforeExprs.push(options.beforeExpr ?? false);
107 +
  tokenStartsExprs.push(options.startsExpr ?? false);
108 +
  tokenPrefixes.push(options.prefix ?? false);
109 +
  tokenTypes.push(new ExportedTokenType(name, options));
110 +
111 +
  return tokenTypeCounter;
112 +
}
113 +
114 +
// For performance the token type helpers depend on the following declarations order.
115 +
// When adding new token types, please also check if the token helpers need update.
116 +
117 +
export const tt: { [name: string]: TokenType } = {
118 +
  num: createToken("num", { startsExpr }),
119 +
  bigint: createToken("bigint", { startsExpr }),
120 +
  decimal: createToken("decimal", { startsExpr }),
121 +
  regexp: createToken("regexp", { startsExpr }),
122 +
  string: createToken("string", { startsExpr }),
123 +
  name: createToken("name", { startsExpr }),
124 +
  privateName: createToken("#name", { startsExpr }),
125 +
  eof: createToken("eof"),
94 126
95 127
  // Punctuation token types.
96 -
  bracketL: new TokenType("[", { beforeExpr, startsExpr }),
97 -
  bracketHashL: new TokenType("#[", { beforeExpr, startsExpr }),
98 -
  bracketBarL: new TokenType("[|", { beforeExpr, startsExpr }),
99 -
  bracketR: new TokenType("]"),
100 -
  bracketBarR: new TokenType("|]"),
101 -
  braceL: new TokenType("{", { beforeExpr, startsExpr }),
102 -
  braceBarL: new TokenType("{|", { beforeExpr, startsExpr }),
103 -
  braceHashL: new TokenType("#{", { beforeExpr, startsExpr }),
104 -
  braceR: new TokenType("}", { beforeExpr }),
105 -
  braceBarR: new TokenType("|}"),
106 -
  parenL: new TokenType("(", { beforeExpr, startsExpr }),
107 -
  parenR: new TokenType(")"),
108 -
  comma: new TokenType(",", { beforeExpr }),
109 -
  semi: new TokenType(";", { beforeExpr }),
110 -
  colon: new TokenType(":", { beforeExpr }),
111 -
  doubleColon: new TokenType("::", { beforeExpr }),
112 -
  dot: new TokenType("."),
113 -
  question: new TokenType("?", { beforeExpr }),
114 -
  questionDot: new TokenType("?."),
115 -
  arrow: new TokenType("=>", { beforeExpr }),
116 -
  template: new TokenType("template"),
117 -
  ellipsis: new TokenType("...", { beforeExpr }),
118 -
  backQuote: new TokenType("`", { startsExpr }),
119 -
  dollarBraceL: new TokenType("${", { beforeExpr, startsExpr }),
120 -
  at: new TokenType("@"),
121 -
  hash: new TokenType("#", { startsExpr }),
128 +
  bracketL: createToken("[", { beforeExpr, startsExpr }),
129 +
  bracketHashL: createToken("#[", { beforeExpr, startsExpr }),
130 +
  bracketBarL: createToken("[|", { beforeExpr, startsExpr }),
131 +
  bracketR: createToken("]"),
132 +
  bracketBarR: createToken("|]"),
133 +
  braceL: createToken("{", { beforeExpr, startsExpr }),
134 +
  braceBarL: createToken("{|", { beforeExpr, startsExpr }),
135 +
  braceHashL: createToken("#{", { beforeExpr, startsExpr }),
136 +
  braceR: createToken("}", { beforeExpr }),
137 +
  braceBarR: createToken("|}"),
138 +
  parenL: createToken("(", { beforeExpr, startsExpr }),
139 +
  parenR: createToken(")"),
140 +
  comma: createToken(",", { beforeExpr }),
141 +
  semi: createToken(";", { beforeExpr }),
142 +
  colon: createToken(":", { beforeExpr }),
143 +
  doubleColon: createToken("::", { beforeExpr }),
144 +
  dot: createToken("."),
145 +
  question: createToken("?", { beforeExpr }),
146 +
  questionDot: createToken("?."),
147 +
  arrow: createToken("=>", { beforeExpr }),
148 +
  template: createToken("template"),
149 +
  ellipsis: createToken("...", { beforeExpr }),
150 +
  backQuote: createToken("`", { startsExpr }),
151 +
  dollarBraceL: createToken("${", { beforeExpr, startsExpr }),
152 +
  at: createToken("@"),
153 +
  hash: createToken("#", { startsExpr }),
122 154
123 155
  // Special hashbang token.
124 -
  interpreterDirective: new TokenType("#!..."),
156 +
  interpreterDirective: createToken("#!..."),
125 157
126 158
  // Operators. These carry several kinds of properties to help the
127 159
  // parser use them properly (the presence of these properties is
@@ -137,15 +169,19 @@
Loading
137 169
  // binary operators with a very low precedence, that should result
138 170
  // in AssignmentExpression nodes.
139 171
140 -
  eq: new TokenType("=", { beforeExpr, isAssign }),
141 -
  assign: new TokenType("_=", { beforeExpr, isAssign }),
142 -
  slashAssign: new TokenType("_=", { beforeExpr, isAssign }),
172 +
  // start: isAssign
173 +
  eq: createToken("=", { beforeExpr, isAssign }),
174 +
  assign: createToken("_=", { beforeExpr, isAssign }),
175 +
  slashAssign: createToken("_=", { beforeExpr, isAssign }),
143 176
  // This is only needed to support % as a Hack-pipe topic token. If the proposal
144 177
  // ends up choosing a different token, it can be merged with tt.assign.
145 -
  moduloAssign: new TokenType("_=", { beforeExpr, isAssign }),
146 -
  incDec: new TokenType("++/--", { prefix, postfix, startsExpr }),
147 -
  bang: new TokenType("!", { beforeExpr, prefix, startsExpr }),
148 -
  tilde: new TokenType("~", { beforeExpr, prefix, startsExpr }),
178 +
  moduloAssign: createToken("_=", { beforeExpr, isAssign }),
179 +
  // end: isAssign
180 +
181 +
  incDec: createToken("++/--", { prefix, postfix, startsExpr }),
182 +
  bang: createToken("!", { beforeExpr, prefix, startsExpr }),
183 +
  tilde: createToken("~", { beforeExpr, prefix, startsExpr }),
184 +
  // start: isBinop
149 185
  pipeline: createBinop("|>", 0),
150 186
  nullishCoalescing: createBinop("??", 1),
151 187
  logicalOR: createBinop("||", 1),
@@ -156,13 +192,13 @@
Loading
156 192
  equality: createBinop("==/!=/===/!==", 6),
157 193
  relational: createBinop("</>/<=/>=", 7),
158 194
  bitShift: createBinop("<</>>/>>>", 8),
159 -
  plusMin: new TokenType("+/-", { beforeExpr, binop: 9, prefix, startsExpr }),
195 +
  plusMin: createToken("+/-", { beforeExpr, binop: 9, prefix, startsExpr }),
160 196
  // startsExpr: required by v8intrinsic plugin
161 -
  modulo: new TokenType("%", { binop: 10, startsExpr }),
197 +
  modulo: createToken("%", { binop: 10, startsExpr }),
162 198
  // unset `beforeExpr` as it can be `function *`
163 -
  star: new TokenType("*", { binop: 10 }),
199 +
  star: createToken("*", { binop: 10 }),
164 200
  slash: createBinop("/", 10),
165 -
  exponent: new TokenType("**", {
201 +
  exponent: createToken("**", {
166 202
    beforeExpr,
167 203
    binop: 11,
168 204
    rightAssociative: true,
@@ -171,16 +207,18 @@
Loading
171 207
  // Keywords
172 208
  // Don't forget to update packages/babel-helper-validator-identifier/src/keyword.js
173 209
  // when new keywords are added
210 +
  // start: isKeyword
211 +
  _in: createKeyword("in", { beforeExpr, binop: 7 }),
212 +
  _instanceof: createKeyword("instanceof", { beforeExpr, binop: 7 }),
213 +
  // end: isBinop
174 214
  _break: createKeyword("break"),
175 215
  _case: createKeyword("case", { beforeExpr }),
176 216
  _catch: createKeyword("catch"),
177 217
  _continue: createKeyword("continue"),
178 218
  _debugger: createKeyword("debugger"),
179 219
  _default: createKeyword("default", { beforeExpr }),
180 -
  _do: createKeyword("do", { isLoop, beforeExpr }),
181 220
  _else: createKeyword("else", { beforeExpr }),
182 221
  _finally: createKeyword("finally"),
183 -
  _for: createKeyword("for", { isLoop }),
184 222
  _function: createKeyword("function", { startsExpr }),
185 223
  _if: createKeyword("if"),
186 224
  _return: createKeyword("return", { beforeExpr }),
@@ -189,7 +227,6 @@
Loading
189 227
  _try: createKeyword("try"),
190 228
  _var: createKeyword("var"),
191 229
  _const: createKeyword("const"),
192 -
  _while: createKeyword("while", { isLoop }),
193 230
  _with: createKeyword("with"),
194 231
  _new: createKeyword("new", { beforeExpr, startsExpr }),
195 232
  _this: createKeyword("this", { startsExpr }),
@@ -201,9 +238,99 @@
Loading
201 238
  _null: createKeyword("null", { startsExpr }),
202 239
  _true: createKeyword("true", { startsExpr }),
203 240
  _false: createKeyword("false", { startsExpr }),
204 -
  _in: createKeyword("in", { beforeExpr, binop: 7 }),
205 -
  _instanceof: createKeyword("instanceof", { beforeExpr, binop: 7 }),
206 241
  _typeof: createKeyword("typeof", { beforeExpr, prefix, startsExpr }),
207 242
  _void: createKeyword("void", { beforeExpr, prefix, startsExpr }),
208 243
  _delete: createKeyword("delete", { beforeExpr, prefix, startsExpr }),
244 +
  // start: isLoop
245 +
  _do: createKeyword("do", { isLoop, beforeExpr }),
246 +
  _for: createKeyword("for", { isLoop }),
247 +
  _while: createKeyword("while", { isLoop }),
248 +
  // end: isLoop
249 +
  // end: isKeyword
250 +
251 +
  // jsx plugin
252 +
  jsxName: createToken("jsxName"),
253 +
  jsxText: createToken("jsxText", { beforeExpr: true }),
254 +
  jsxTagStart: createToken("jsxTagStart", { startsExpr: true }),
255 +
  jsxTagEnd: createToken("jsxTagEnd"),
256 +
257 +
  // placeholder plugin
258 +
  placeholder: createToken("%%", { startsExpr: true }),
209 259
};
260 +
261 +
export function tokenComesBeforeExpression(token: TokenType): boolean {
262 +
  return tokenBeforeExprs[token];
263 +
}
264 +
265 +
export function tokenCanStartExpression(token: TokenType): boolean {
266 +
  return tokenStartsExprs[token];
267 +
}
268 +
269 +
export function tokenIsAssignment(token: TokenType): boolean {
270 +
  return token >= tt.eq && token <= tt.moduloAssign;
271 +
}
272 +
273 +
export function tokenIsLoop(token: TokenType): boolean {
274 +
  return token >= tt._do && token <= tt._while;
275 +
}
276 +
277 +
export function tokenIsKeyword(token: TokenType): boolean {
278 +
  return token >= tt._in && token <= tt._while;
279 +
}
280 +
281 +
export function tokenIsOperator(token: TokenType): boolean {
282 +
  return token >= tt.pipeline && token <= tt._instanceof;
283 +
}
284 +
285 +
export function tokenIsPostfix(token: TokenType): boolean {
286 +
  return token === tt.incDec;
287 +
}
288 +
289 +
export function tokenIsPrefix(token: TokenType): boolean {
290 +
  return tokenPrefixes[token];
291 +
}
292 +
293 +
export function tokenLabelName(token: TokenType): string {
294 +
  return tokenLabels[token];
295 +
}
296 +
297 +
export function tokenOperatorPrecedence(token: TokenType): number {
298 +
  return tokenBinops[token];
299 +
}
300 +
301 +
export function tokenIsRightAssociative(token: TokenType): boolean {
302 +
  return token === tt.exponent;
303 +
}
304 +
305 +
export function getExportedToken(token: TokenType): ExportedTokenType {
306 +
  return tokenTypes[token];
307 +
}
308 +
309 +
export function isTokenType(obj: any): boolean {
310 +
  return typeof obj === "number";
311 +
}
312 +
313 +
if (!process.env.BABEL_8_BREAKING) {
314 +
  tokenTypes[tt.braceR].updateContext = context => {
315 +
    context.pop();
316 +
  };
317 +
318 +
  tokenTypes[tt.braceL].updateContext =
319 +
    tokenTypes[tt.braceHashL].updateContext =
320 +
    tokenTypes[tt.dollarBraceL].updateContext =
321 +
      context => {
322 +
        context.push(tc.brace);
323 +
      };
324 +
325 +
  tokenTypes[tt.backQuote].updateContext = context => {
326 +
    if (context[context.length - 1] === tc.template) {
327 +
      context.pop();
328 +
    } else {
329 +
      context.push(tc.template);
330 +
    }
331 +
  };
332 +
333 +
  tokenTypes[tt.jsxTagStart].updateContext = context => {
334 +
    context.push(tc.j_expr, tc.j_oTag);
335 +
  };
336 +
}

@@ -2,13 +2,11 @@
Loading
2 2
3 3
import * as charCodes from "charcodes";
4 4
5 -
import { types as tt, TokenType } from "../tokenizer/types";
5 +
import { tokenLabelName, tt } from "../tokenizer/types";
6 6
import type Parser from "../parser";
7 7
import * as N from "../types";
8 8
import { makeErrorTemplates, ErrorCodes } from "../parser/error";
9 9
10 -
tt.placeholder = new TokenType("%%", { startsExpr: true });
11 -
12 10
export type PlaceholderTypes =
13 11
  | "Identifier"
14 12
  | "StringLiteral"
@@ -288,7 +286,7 @@
Loading
288 286
        if (this.isUnparsedContextual(next, "from")) {
289 287
          if (
290 288
            this.input.startsWith(
291 -
              tt.placeholder.label,
289 +
              tokenLabelName(tt.placeholder),
292 290
              this.nextTokenStartSince(next + 4),
293 291
            )
294 292
          ) {

@@ -1,6 +1,6 @@
Loading
1 1
// @flow
2 2
3 -
import { TokenType } from "../tokenizer/types";
3 +
import { type TokenType } from "../tokenizer/types";
4 4
import type Parser from "../parser";
5 5
import type { ExpressionErrors } from "../parser/util";
6 6
import * as N from "../types";

@@ -6,7 +6,13 @@
Loading
6 6
import * as N from "../types";
7 7
import * as charCodes from "charcodes";
8 8
import { isIdentifierStart, isIdentifierChar } from "../util/identifier";
9 -
import { types as tt, keywords as keywordTypes, type TokenType } from "./types";
9 +
import {
10 +
  tokenIsKeyword,
11 +
  tokenLabelName,
12 +
  tt,
13 +
  keywords as keywordTypes,
14 +
  type TokenType,
15 +
} from "./types";
10 16
import { type TokContext, types as ct } from "./context";
11 17
import ParserErrors, { Errors, type ErrorTemplate } from "../parser/error";
12 18
import { SourceLocation } from "../util/location";
@@ -1564,15 +1570,54 @@
Loading
1564 1570
  }
1565 1571
1566 1572
  checkKeywordEscapes(): void {
1567 -
    const kw = this.state.type.keyword;
1568 -
    if (kw && this.state.containsEsc) {
1569 -
      this.raise(this.state.start, Errors.InvalidEscapedReservedWord, kw);
1573 +
    const { type } = this.state;
1574 +
    if (tokenIsKeyword(type) && this.state.containsEsc) {
1575 +
      this.raise(
1576 +
        this.state.start,
1577 +
        Errors.InvalidEscapedReservedWord,
1578 +
        tokenLabelName(type),
1579 +
      );
1570 1580
    }
1571 1581
  }
1572 1582
1573 1583
  // the prevType is required by the jsx plugin
1574 1584
  // eslint-disable-next-line no-unused-vars
1575 1585
  updateContext(prevType: TokenType): void {
1576 -
    this.state.type.updateContext?.(this.state.context);
1586 +
    // Token-specific context update code
1587 +
    // Note that we should avoid accessing `this.prodParam` in context update,
1588 +
    // because it is executed immediately when last token is consumed, which may be
1589 +
    // before `this.prodParam` is updated. e.g.
1590 +
    // ```
1591 +
    // function *g() { () => yield / 2 }
1592 +
    // ```
1593 +
    // When `=>` is eaten, the context update of `yield` is executed, however,
1594 +
    // `this.prodParam` still has `[Yield]` production because it is not yet updated
1595 +
    const { context, type } = this.state;
1596 +
    switch (type) {
1597 +
      case tt.braceR:
1598 +
        context.pop();
1599 +
        break;
1600 +
      // we don't need to update context for tt.braceBarL because we do not pop context for tt.braceBarR
1601 +
      // ideally only dollarBraceL "${" needs a non-template context
1602 +
      // in order to indicate that the last "`" in `${`" starts a new string template
1603 +
      // inside a template element within outer string template.
1604 +
      // but when we popped such context in `}`, we lost track of whether this
1605 +
      // `}` matches a `${` or other tokens matching `}`, so we have to push
1606 +
      // such context in every token that `}` will match.
1607 +
      case tt.braceL:
1608 +
      case tt.braceHashL:
1609 +
      case tt.dollarBraceL:
1610 +
        context.push(ct.brace);
1611 +
        break;
1612 +
      case tt.backQuote:
1613 +
        if (context[context.length - 1] === ct.template) {
1614 +
          context.pop();
1615 +
        } else {
1616 +
          context.push(ct.template);
1617 +
        }
1618 +
        break;
1619 +
      default:
1620 +
        break;
1621 +
    }
1577 1622
  }
1578 1623
}

@@ -1,6 +1,12 @@
Loading
1 1
// @flow
2 2
3 -
import { types as tt, TokenType } from "../tokenizer/types";
3 +
import {
4 +
  isTokenType,
5 +
  tokenIsKeyword,
6 +
  tokenLabelName,
7 +
  tt,
8 +
  type TokenType,
9 +
} from "../tokenizer/types";
4 10
import Tokenizer from "../tokenizer";
5 11
import State from "../tokenizer/state";
6 12
import type { Node } from "../types";
@@ -168,15 +174,19 @@
Loading
168 174
      template: "Unexpected token",
169 175
    },
170 176
  ): empty {
171 -
    if (messageOrType instanceof TokenType) {
177 +
    if (isTokenType(messageOrType)) {
172 178
      messageOrType = {
173 179
        code: ErrorCodes.SyntaxError,
174 180
        reasonCode: "UnexpectedToken",
175 -
        template: `Unexpected token, expected "${messageOrType.label}"`,
181 +
        template: `Unexpected token, expected "${tokenLabelName(
182 +
          // $FlowIgnore: Flow does not support assertion signature and TokenType is opaque
183 +
          messageOrType,
184 +
        )}"`,
176 185
      };
177 186
    }
178 187
179 188
    /* eslint-disable @babel/development-internal/dry-error-messages */
189 +
    // $FlowIgnore: Flow does not support assertion signature and TokenType is opaque
180 190
    throw this.raise(pos != null ? pos : this.state.start, messageOrType);
181 191
    /* eslint-enable @babel/development-internal/dry-error-messages */
182 192
  }
@@ -298,7 +308,7 @@
Loading
298 308
  isLiteralPropertyName(): boolean {
299 309
    return (
300 310
      this.match(tt.name) ||
301 -
      !!this.state.type.keyword ||
311 +
      tokenIsKeyword(this.state.type) ||
302 312
      this.match(tt.string) ||
303 313
      this.match(tt.num) ||
304 314
      this.match(tt.bigint) ||

@@ -8,7 +8,13 @@
Loading
8 8
import XHTMLEntities from "./xhtml";
9 9
import type Parser from "../../parser";
10 10
import type { ExpressionErrors } from "../../parser/util";
11 -
import { TokenType, types as tt } from "../../tokenizer/types";
11 +
import {
12 +
  tokenComesBeforeExpression,
13 +
  tokenIsKeyword,
14 +
  tokenLabelName,
15 +
  type TokenType,
16 +
  tt,
17 +
} from "../../tokenizer/types";
12 18
import { TokContext, types as tc } from "../../tokenizer/context";
13 19
import * as N from "../../types";
14 20
import { isIdentifierChar, isIdentifierStart } from "../../util/identifier";
@@ -45,23 +51,11 @@
Loading
45 51
/* eslint-disable sort-keys */
46 52
47 53
// Be aware that this file is always executed and not only when the plugin is enabled.
48 -
// Therefore this contexts and tokens do always exist.
54 +
// Therefore the contexts do always exist.
49 55
tc.j_oTag = new TokContext("<tag");
50 56
tc.j_cTag = new TokContext("</tag");
51 57
tc.j_expr = new TokContext("<tag>...</tag>", true);
52 58
53 -
tt.jsxName = new TokenType("jsxName");
54 -
tt.jsxText = new TokenType("jsxText", { beforeExpr: true });
55 -
tt.jsxTagStart = new TokenType("jsxTagStart", { startsExpr: true });
56 -
tt.jsxTagEnd = new TokenType("jsxTagEnd");
57 -
58 -
tt.jsxTagStart.updateContext = context => {
59 -
  context.push(
60 -
    tc.j_expr, // treat as beginning of JSX expression
61 -
    tc.j_oTag, // start opening tag context
62 -
  );
63 -
};
64 -
65 59
function isFragment(object: ?N.JSXElement): boolean {
66 60
  return object
67 61
    ? object.type === "JSXOpeningFragment" ||
@@ -259,8 +253,8 @@
Loading
259 253
      const node = this.startNode();
260 254
      if (this.match(tt.jsxName)) {
261 255
        node.name = this.state.value;
262 -
      } else if (this.state.type.keyword) {
263 -
        node.name = this.state.type.keyword;
256 +
      } else if (tokenIsKeyword(this.state.type)) {
257 +
        node.name = tokenLabelName(this.state.type);
264 258
      } else {
265 259
        this.unexpected();
266 260
      }
@@ -624,6 +618,11 @@
Loading
624 618
        // reconsider as closing tag context
625 619
        context.splice(-2, 2, tc.j_cTag);
626 620
        this.state.exprAllowed = false;
621 +
      } else if (type === tt.jsxTagStart) {
622 +
        context.push(
623 +
          tc.j_expr, // treat as beginning of JSX expression
624 +
          tc.j_oTag, // start opening tag context
625 +
        );
627 626
      } else if (type === tt.jsxTagEnd) {
628 627
        const out = context.pop();
629 628
        if ((out === tc.j_oTag && prevType === tt.slash) || out === tc.j_cTag) {
@@ -633,12 +632,12 @@
Loading
633 632
          this.state.exprAllowed = true;
634 633
        }
635 634
      } else if (
636 -
        type.keyword &&
635 +
        tokenIsKeyword(type) &&
637 636
        (prevType === tt.dot || prevType === tt.questionDot)
638 637
      ) {
639 638
        this.state.exprAllowed = false;
640 639
      } else {
641 -
        this.state.exprAllowed = type.beforeExpr;
640 +
        this.state.exprAllowed = tokenComesBeforeExpression(type);
642 641
      }
643 642
    }
644 643
  };
Files Coverage
codemods 100.00%
eslint 95.07%
packages 92.00%
Project Totals (457 files) 92.10%
1
coverage:
2
  parsers:
3
    javascript:
4
      enable_partials: yes
5
  status:
6
    project:
7
      default:
8
        target: "90%"
9
    patch:
10
      enabled: false
11
ignore:
12
  - packages/babel-types/src/*/generated/*
Sunburst
The inner-most circle is the entire project; moving away from the center are folders and, finally, a single file. The size and color of each slice represent the number of statements and the coverage, respectively.
Icicle
The top section represents the entire project, proceeding through folders and finally individual files. The size and color of each slice represent the number of statements and the coverage, respectively.
Grid
Each block represents a single file in the project. The size and color of each block represent the number of statements and the coverage, respectively.
Loading