You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

140 lines
2.9 KiB

"""Token constants (from "token.h")."""

# NOTE: __all__ is extended further down with the name of every token
# constant, once tok_name has been built from the module globals.
__all__ = ['tok_name', 'ISTERMINAL', 'ISNONTERMINAL', 'ISEOF']

#  This file is automatically generated; please don't muck it up!
#
#  To update the symbols in this file, 'cd' to the top directory of
#  the python source tree after building the interpreter and run:
#
#    python Lib/token.py
# Everything between the start/end markers below is rewritten in place by
# _main().  The marker comment lines are located with an exact string match
# (format.index in _main), so do not edit them or the generated lines.
#--start constants--
ENDMARKER = 0
NAME = 1
NUMBER = 2
STRING = 3
NEWLINE = 4
INDENT = 5
DEDENT = 6
LPAR = 7
RPAR = 8
LSQB = 9
RSQB = 10
COLON = 11
COMMA = 12
SEMI = 13
PLUS = 14
MINUS = 15
STAR = 16
SLASH = 17
VBAR = 18
AMPER = 19
LESS = 20
GREATER = 21
EQUAL = 22
DOT = 23
PERCENT = 24
LBRACE = 26
RBRACE = 27
EQEQUAL = 28
NOTEQUAL = 29
LESSEQUAL = 30
GREATEREQUAL = 31
TILDE = 32
CIRCUMFLEX = 33
LEFTSHIFT = 34
RIGHTSHIFT = 35
DOUBLESTAR = 36
PLUSEQUAL = 37
MINEQUAL = 38
STAREQUAL = 39
SLASHEQUAL = 40
PERCENTEQUAL = 41
AMPEREQUAL = 42
VBAREQUAL = 43
CIRCUMFLEXEQUAL = 44
LEFTSHIFTEQUAL = 45
RIGHTSHIFTEQUAL = 46
DOUBLESTAREQUAL = 47
DOUBLESLASH = 48
DOUBLESLASHEQUAL = 49
AT = 50
RARROW = 51
ELLIPSIS = 52
OP = 53
ERRORTOKEN = 54
N_TOKENS = 55
NT_OFFSET = 256
#--end constants--
  67. tok_name = {value: name
  68. for name, value in globals().items()
  69. if isinstance(value, int)}
  70. __all__.extend(tok_name.values())
  71. def ISTERMINAL(x):
  72. return x < NT_OFFSET
  73. def ISNONTERMINAL(x):
  74. return x >= NT_OFFSET
  75. def ISEOF(x):
  76. return x == ENDMARKER
  77. def _main():
  78. import re
  79. import sys
  80. args = sys.argv[1:]
  81. inFileName = args and args[0] or "Include/token.h"
  82. outFileName = "Lib/token.py"
  83. if len(args) > 1:
  84. outFileName = args[1]
  85. try:
  86. fp = open(inFileName)
  87. except IOError as err:
  88. sys.stdout.write("I/O error: %s\n" % str(err))
  89. sys.exit(1)
  90. lines = fp.read().split("\n")
  91. fp.close()
  92. prog = re.compile(
  93. "#define[ \t][ \t]*([A-Z0-9][A-Z0-9_]*)[ \t][ \t]*([0-9][0-9]*)",
  94. re.IGNORECASE)
  95. tokens = {}
  96. for line in lines:
  97. match = prog.match(line)
  98. if match:
  99. name, val = match.group(1, 2)
  100. val = int(val)
  101. tokens[val] = name # reverse so we can sort them...
  102. keys = sorted(tokens.keys())
  103. # load the output skeleton from the target:
  104. try:
  105. fp = open(outFileName)
  106. except IOError as err:
  107. sys.stderr.write("I/O error: %s\n" % str(err))
  108. sys.exit(2)
  109. format = fp.read().split("\n")
  110. fp.close()
  111. try:
  112. start = format.index("#--start constants--") + 1
  113. end = format.index("#--end constants--")
  114. except ValueError:
  115. sys.stderr.write("target does not contain format markers")
  116. sys.exit(3)
  117. lines = []
  118. for val in keys:
  119. lines.append("%s = %d" % (tokens[val], val))
  120. format[start:end] = lines
  121. try:
  122. fp = open(outFileName, 'w')
  123. except IOError as err:
  124. sys.stderr.write("I/O error: %s\n" % str(err))
  125. sys.exit(4)
  126. fp.write("\n".join(format))
  127. fp.close()
# When run as a script, regenerate this file's constants block in place.
if __name__ == "__main__":
    _main()