
/unladen_swallow/lib/spitfire/spitfire/compiler/parser.py

https://bitbucket.org/csenger/benchmarks
Possible License(s): BSD-3-Clause, Apache-2.0, GPL-2.0
# This parser can parse a simple subset of Cheetah's syntax.
from spitfire.compiler.ast import *
from string import *
import re
from yappsrt import *
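
# The scanner below defines the token patterns for the template language:
# quoted keyword/operator patterns (e.g. '#else', arithmetic and boolean
# operators with optional surrounding whitespace) and named tokens such as
# ID, NUM, the quoted-string bodies, and the directive/placeholder markers.
# It relies on the Scanner base class provided by the yapps runtime (yappsrt).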

class SpitfireParserScanner(Scanner):
    patterns = [
        ("'[ \\t]*\\+[ \\t]*'", re.compile('[ \t]*\\+[ \t]*')),
        ("'[ \\t]*\\%[ \\t]*'", re.compile('[ \t]*\\%[ \t]*')),
        ("'[ \\t]*\\/[ \\t]*'", re.compile('[ \t]*\\/[ \t]*')),
        ("'[ \\t]*\\*[ \\t]*'", re.compile('[ \t]*\\*[ \t]*')),
        ("'[ \\t]*\\-[ \\t]*'", re.compile('[ \t]*\\-[ \t]*')),
        ('"[ \\t]*not[ \\t]*"', re.compile('[ \t]*not[ \t]*')),
        ("'[ \\t]*and[ \\t]*'", re.compile('[ \t]*and[ \t]*')),
        ("'[ \\t]*or[ \\t]*'", re.compile('[ \t]*or[ \t]*')),
        ('"\\."', re.compile('\\.')),
        ('"False"', re.compile('False')),
        ('"True"', re.compile('True')),
        ('"\'"', re.compile("'")),
        ('\'"\'', re.compile('"')),
        ("'#else'", re.compile('#else')),
        ("'#elif'", re.compile('#elif')),
        ("'for'", re.compile('for')),
        ("'[ \\t]*in[ \\t]*'", re.compile('[ \t]*in[ \t]*')),
        ("'for[ \\t]*'", re.compile('for[ \t]*')),
        ("'def'", re.compile('def')),
        ("'i18n'", re.compile('i18n')),
        ("'block'", re.compile('block')),
        ("'else'", re.compile('else')),
        ("'if'", re.compile('if')),
        ("'echo'", re.compile('echo')),
        ("'set'", re.compile('set')),
        ("'filter'", re.compile('filter')),
        ("'attr'", re.compile('attr')),
        ("'continue'", re.compile('continue')),
        ("'break'", re.compile('break')),
        ("'slurp'", re.compile('slurp')),
        ("'import'", re.compile('import')),
        ("'from'", re.compile('from')),
        ("'absolute_extends'", re.compile('absolute_extends')),
        ("'extends'", re.compile('extends')),
        ("'implements'", re.compile('implements')),
        ('DOT', re.compile('\\.')),
        ('NUM', re.compile('[0-9]+')),
        ('ID', re.compile('[A-Za-z_][0-9A-Za-z_]*')),
        ('SINGLE_QUOTE_STR', re.compile("(?:[^'\\\\]|\\\\.)*")),
        ('DOUBLE_QUOTE_STR', re.compile('(?:[^"\\\\]|\\\\.)*')),
        ('SINGLE_LINE_COMMENT', re.compile('#.*?\n')),
        ('MULTI_LINE_COMMENT', re.compile('\\*[\\W\\w\\S\\s]+\\*#')),
        ('ASSIGN_OPERATOR', re.compile('=')),
        ('COMP_OPERATOR', re.compile('[ \t]*(<=|>=|==|>|<|!=|[ \t]+in[ \t]+)[ \t]*')),
        ('OPEN_PAREN', re.compile('[ \t]*\\([ \t]*')),
        ('PLACEHOLDER_OPEN_PAREN', re.compile('\\([ \t]*')),
        ('CLOSE_PAREN', re.compile('[ \t]*\\)')),
        ('OPEN_BRACKET', re.compile('[ \t]*\\[[ \t]*')),
        ('CLOSE_BRACKET', re.compile('[ \t]*\\][ \t]*')),
        ('PLACEHOLDER_OPEN_BRACE', re.compile('\\{[ \t]*')),
        ('PLACEHOLDER_CLOSE_BRACE', re.compile('[ \t]*\\}')),
        ('OPEN_BRACE', re.compile('[ \t]*\\{[ \t]*')),
        ('CLOSE_BRACE', re.compile('[ \t]*\\}[ \t]*')),
        ('PIPE', re.compile('[ \t]*\\|[ \t]*')),
        ('COMMA_DELIMITER', re.compile('[ \t]*,[ \t]*')),
        ('COLON_DELIMITER', re.compile('[ \t]*:[ \t]*')),
        ('SPACE', re.compile('[ \t]+')),
        ('CLOSE_DIRECTIVE_TOKEN', re.compile('[ \t]*[\n#]')),
        ('END_DIRECTIVE', re.compile('#end')),
        ('START_DIRECTIVE', re.compile('#')),
        ('START_PLACEHOLDER', re.compile('\\$')),
        ('NEWLINE', re.compile('\n')),
        ('PYTHON_LINE', re.compile('.+')),
        ('TEXT', re.compile('[^#\\$\n]+')),
        ('END', re.compile('$')),
        ('I18N_BODY', re.compile('[^#]+')),
    ]

    def __init__(self, str):
        Scanner.__init__(self, None, [], str)
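
# SpitfireParser is a recursive-descent parser in the yapps style: each method
# corresponds to a grammar rule, uses self._peek() to choose an alternative,
# self._scan() to consume a token, and builds AST nodes imported from
# spitfire.compiler.ast (TemplateNode, IfNode, ForNode, PlaceholderNode, ...).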

class SpitfireParser(Parser):
    def CLOSE_DIRECTIVE(self):
        if self._peek('SPACE', 'CLOSE_DIRECTIVE_TOKEN') == 'SPACE':
            SPACE = self._scan('SPACE')
        CLOSE_DIRECTIVE_TOKEN = self._scan('CLOSE_DIRECTIVE_TOKEN')
        return CLOSE_DIRECTIVE_TOKEN
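
    # Top-level entry rules: goal() parses a full template, fragment_goal()
    # parses a fragment, and i18n_goal() additionally records the start/end
    # token positions of each text-or-placeholder chunk it collects.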

    def goal(self):
        template = TemplateNode()
        while self._peek('END', 'START_DIRECTIVE', 'SPACE', 'NEWLINE', 'START_PLACEHOLDER', 'TEXT') != 'END':
            block = self.block(start=True)
            template.append(block)
        END = self._scan('END')
        return template

    def fragment_goal(self):
        fragment = FragmentNode()
        while self._peek('END', 'START_DIRECTIVE', 'SPACE', 'NEWLINE', 'START_PLACEHOLDER', 'TEXT') != 'END':
            block = self.block(start=True)
            fragment.append(block)
        END = self._scan('END')
        return fragment

    def i18n_goal(self):
        fragment = FragmentNode()
        start_pos = 0
        while self._peek('END', 'START_DIRECTIVE', 'SPACE', 'NEWLINE', 'START_PLACEHOLDER', 'TEXT') != 'END':
            text_or_placeholders = self.text_or_placeholders(start=True)
            end_pos = self._scanner.tokens[self._pos-1][1]
            fragment.append(text_or_placeholders)
            text_or_placeholders.start = start_pos
            text_or_placeholders.end = end_pos
            start_pos = end_pos
        END = self._scan('END')
        return fragment

    def statement(self):
        _token_ = self._peek("'implements'", "'extends'", "'absolute_extends'", "'from'", "'import'", "'slurp'", "'break'", "'continue'", "'attr'", "'filter'", "'set'", "'echo'")
        if _token_ == "'implements'":
            self._scan("'implements'")
            SPACE = self._scan('SPACE')
            ID = self._scan('ID')
            CLOSE_DIRECTIVE = self.CLOSE_DIRECTIVE()
            return ImplementsNode(ID)
        elif _token_ == "'extends'":
            self._scan("'extends'")
            SPACE = self._scan('SPACE')
            modulename = self.modulename()
            CLOSE_DIRECTIVE = self.CLOSE_DIRECTIVE()
            return ExtendsNode(modulename)
        elif _token_ == "'absolute_extends'":
            self._scan("'absolute_extends'")
            SPACE = self._scan('SPACE')
            modulename = self.modulename()
            CLOSE_DIRECTIVE = self.CLOSE_DIRECTIVE()
            return AbsoluteExtendsNode(modulename)
        elif _token_ == "'from'":
            self._scan("'from'")
            SPACE = self._scan('SPACE')
            modulename = self.modulename()
            SPACE = self._scan('SPACE')
            self._scan("'import'")
            SPACE = self._scan('SPACE')
            identifier = self.identifier()
            CLOSE_DIRECTIVE = self.CLOSE_DIRECTIVE()
            return FromNode(modulename, identifier)
        elif _token_ == "'import'":
            self._scan("'import'")
            SPACE = self._scan('SPACE')
            modulename = self.modulename()
            CLOSE_DIRECTIVE = self.CLOSE_DIRECTIVE()
            return ImportNode(modulename)
        elif _token_ == "'slurp'":
            self._scan("'slurp'")
            CLOSE_DIRECTIVE = self.CLOSE_DIRECTIVE()
            return CommentNode('slurp')
        elif _token_ == "'break'":
            self._scan("'break'")
            CLOSE_DIRECTIVE = self.CLOSE_DIRECTIVE()
            return BreakNode()
        elif _token_ == "'continue'":
            self._scan("'continue'")
            CLOSE_DIRECTIVE = self.CLOSE_DIRECTIVE()
            return ContinueNode()
        elif _token_ == "'attr'":
            self._scan("'attr'")
            SPACE = self._scan('SPACE')
            placeholder = self.placeholder()
            SPACE = self._scan('SPACE')
            ASSIGN_OPERATOR = self._scan('ASSIGN_OPERATOR')
            SPACE = self._scan('SPACE')
            literal = self.literal()
            CLOSE_DIRECTIVE = self.CLOSE_DIRECTIVE()
            return AttributeNode(placeholder.name, literal)
        elif _token_ == "'filter'":
            self._scan("'filter'")
            SPACE = self._scan('SPACE')
            identifier = self.identifier()
            CLOSE_DIRECTIVE = self.CLOSE_DIRECTIVE()
            return AttributeNode('_filter_function', identifier)
        elif _token_ == "'set'":
            self._scan("'set'")
            SPACE = self._scan('SPACE')
            placeholder = self.placeholder()
            _lhs = IdentifierNode(placeholder.name)
            if self._peek('SPACE', 'ASSIGN_OPERATOR') == 'SPACE':
                SPACE = self._scan('SPACE')
            ASSIGN_OPERATOR = self._scan('ASSIGN_OPERATOR')
            if self._peek('SPACE', '"[ \\t]*not[ \\t]*"', 'START_PLACEHOLDER', 'ID', '"True"', '"False"', '\'"\'', '"\'"', 'NUM', 'OPEN_BRACKET', 'OPEN_PAREN', 'OPEN_BRACE', "'[ \\t]*\\-[ \\t]*'") == 'SPACE':
                SPACE = self._scan('SPACE')
            expression = self.expression()
            _rhs = expression
            CLOSE_DIRECTIVE = self.CLOSE_DIRECTIVE()
            return AssignNode(_lhs, _rhs)
        else:  # == "'echo'"
            self._scan("'echo'")
            SPACE = self._scan('SPACE')
            literal = self.literal()
            _true_exp = literal
            _test_exp, _false_exp = None, None
            if self._peek('SPACE', 'CLOSE_DIRECTIVE_TOKEN') == 'SPACE':
                SPACE = self._scan('SPACE')
                self._scan("'if'")
                SPACE = self._scan('SPACE')
                expression = self.expression()
                _test_exp = expression
                if self._peek('SPACE', 'CLOSE_DIRECTIVE_TOKEN') == 'SPACE':
                    SPACE = self._scan('SPACE')
                    self._scan("'else'")
                    SPACE = self._scan('SPACE')
                    literal = self.literal()
                    _false_exp = literal
            CLOSE_DIRECTIVE = self.CLOSE_DIRECTIVE()
            return EchoNode(_true_exp, _test_exp, _false_exp)

    def modulename(self):
        identifier = self.identifier()
        _module_name_list = [identifier]
        while self._peek('DOT', 'SPACE', 'CLOSE_DIRECTIVE_TOKEN') == 'DOT':
            DOT = self._scan('DOT')
            identifier = self.identifier()
            _module_name_list.append(identifier)
        return _module_name_list

    def directive(self):
        START_DIRECTIVE = self._scan('START_DIRECTIVE')
        _node_list = NodeList()
        _token_ = self._peek('SINGLE_LINE_COMMENT', 'MULTI_LINE_COMMENT', "'block'", "'i18n'", "'def'", "'for[ \\t]*'", "'if'", "'implements'", "'extends'", "'absolute_extends'", "'from'", "'import'", "'slurp'", "'break'", "'continue'", "'attr'", "'filter'", "'set'", "'echo'", 'END', 'START_DIRECTIVE', 'SPACE', 'NEWLINE', 'START_PLACEHOLDER', 'END_DIRECTIVE', "'#elif'", 'TEXT', "'#else'")
        if _token_ == 'SINGLE_LINE_COMMENT':
            SINGLE_LINE_COMMENT = self._scan('SINGLE_LINE_COMMENT')
            _node_list.append(CommentNode(START_DIRECTIVE + SINGLE_LINE_COMMENT))
        elif _token_ == 'MULTI_LINE_COMMENT':
            MULTI_LINE_COMMENT = self._scan('MULTI_LINE_COMMENT')
            _node_list.append(CommentNode(START_DIRECTIVE + MULTI_LINE_COMMENT))
        elif _token_ == "'block'":
            self._scan("'block'")
            SPACE = self._scan('SPACE')
            ID = self._scan('ID')
            CLOSE_DIRECTIVE = self.CLOSE_DIRECTIVE()
            _block = BlockNode(ID)
            start = CLOSE_DIRECTIVE.endswith('\n')
            while self._peek('START_DIRECTIVE', 'SPACE', 'NEWLINE', 'START_PLACEHOLDER', 'END_DIRECTIVE', 'TEXT') != 'END_DIRECTIVE':
                block = self.block(start)
                _block.append(block)
            make_optional(_block.child_nodes)
            END_DIRECTIVE = self._scan('END_DIRECTIVE')
            SPACE = self._scan('SPACE')
            self._scan("'block'")
            CLOSE_DIRECTIVE = self.CLOSE_DIRECTIVE()
            _node_list.append(_block)
        elif _token_ == "'i18n'":
            self._scan("'i18n'")
            _macro = MacroNode('i18n')
            if self._peek('OPEN_PAREN', 'SPACE', 'CLOSE_DIRECTIVE_TOKEN') == 'OPEN_PAREN':
                OPEN_PAREN = self._scan('OPEN_PAREN')
                if self._peek('CLOSE_PAREN', 'START_PLACEHOLDER') == 'START_PLACEHOLDER':
                    macro_parameter_list = self.macro_parameter_list()
                    _macro.parameter_list = macro_parameter_list
                CLOSE_PAREN = self._scan('CLOSE_PAREN')
            CLOSE_DIRECTIVE = self.CLOSE_DIRECTIVE()
            start = CLOSE_DIRECTIVE.endswith('\n')
            _macro.value = ''
            while self._peek('I18N_BODY', 'END_DIRECTIVE') == 'I18N_BODY':
                I18N_BODY = self._scan('I18N_BODY')
                _macro.value += I18N_BODY
                if self._peek('START_DIRECTIVE', 'I18N_BODY', 'END_DIRECTIVE') == 'START_DIRECTIVE':
                    START_DIRECTIVE = self._scan('START_DIRECTIVE')
                    _macro.value += START_DIRECTIVE
            END_DIRECTIVE = self._scan('END_DIRECTIVE')
            SPACE = self._scan('SPACE')
            self._scan("'i18n'")
            CLOSE_DIRECTIVE = self.CLOSE_DIRECTIVE()
            _node_list.append(_macro)
        elif _token_ == "'def'":
            self._scan("'def'")
            SPACE = self._scan('SPACE')
            ID = self._scan('ID')
            _def = DefNode(ID)
            if self._peek('OPEN_PAREN', 'SPACE', 'CLOSE_DIRECTIVE_TOKEN') == 'OPEN_PAREN':
                OPEN_PAREN = self._scan('OPEN_PAREN')
                if self._peek('CLOSE_PAREN', 'START_PLACEHOLDER') == 'START_PLACEHOLDER':
                    parameter_list = self.parameter_list()
                    _def.parameter_list = parameter_list
                CLOSE_PAREN = self._scan('CLOSE_PAREN')
            CLOSE_DIRECTIVE = self.CLOSE_DIRECTIVE()
            start = CLOSE_DIRECTIVE.endswith('\n')
            while self._peek('START_DIRECTIVE', 'SPACE', 'NEWLINE', 'START_PLACEHOLDER', 'END_DIRECTIVE', 'TEXT') != 'END_DIRECTIVE':
                block = self.block(start)
                _def.append(block)
            make_optional(_def.child_nodes)
            END_DIRECTIVE = self._scan('END_DIRECTIVE')
            SPACE = self._scan('SPACE')
            self._scan("'def'")
            CLOSE_DIRECTIVE = self.CLOSE_DIRECTIVE()
            _node_list.append(_def)
        elif _token_ == "'for[ \\t]*'":
            self._scan("'for[ \\t]*'")
            target_list = self.target_list()
            self._scan("'[ \\t]*in[ \\t]*'")
            expression_list = self.expression_list()
            CLOSE_DIRECTIVE = self.CLOSE_DIRECTIVE()
            _for_loop = ForNode(target_list, expression_list)
            start = CLOSE_DIRECTIVE.endswith('\n')
            while self._peek('START_DIRECTIVE', 'SPACE', 'NEWLINE', 'START_PLACEHOLDER', 'END_DIRECTIVE', 'TEXT') != 'END_DIRECTIVE':
                block = self.block(start)
                _for_loop.append(block)
            make_optional(_for_loop.child_nodes)
            END_DIRECTIVE = self._scan('END_DIRECTIVE')
            SPACE = self._scan('SPACE')
            self._scan("'for'")
            CLOSE_DIRECTIVE = self.CLOSE_DIRECTIVE()
            _node_list.append(_for_loop)
        elif _token_ == "'if'":
            self._scan("'if'")
            SPACE = self._scan('SPACE')
            expression = self.expression()
            CLOSE_DIRECTIVE = self.CLOSE_DIRECTIVE()
            _if_node = IfNode(expression)
            _last_condition_node = _if_node
            start = CLOSE_DIRECTIVE.endswith('\n')
            while self._peek('START_DIRECTIVE', 'SPACE', 'NEWLINE', 'START_PLACEHOLDER', "'#elif'", 'TEXT', "'#else'", 'END_DIRECTIVE') not in ["'#elif'", "'#else'", 'END_DIRECTIVE']:
                block = self.block(start)
                _if_node.append(block)
            make_optional(_if_node.child_nodes)
            while self._peek("'#elif'", 'START_DIRECTIVE', 'SPACE', 'NEWLINE', 'START_PLACEHOLDER', "'#else'", 'TEXT', 'END_DIRECTIVE') == "'#elif'":
                self._scan("'#elif'")
                SPACE = self._scan('SPACE')
                expression = self.expression()
                CLOSE_DIRECTIVE = self.CLOSE_DIRECTIVE()
                _elif_node = IfNode(expression)
                _last_condition_node.else_.append(_elif_node)
                _last_condition_node = _elif_node
                start = CLOSE_DIRECTIVE.endswith('\n')
                while self._peek('START_DIRECTIVE', 'SPACE', 'NEWLINE', 'START_PLACEHOLDER', 'TEXT', "'#elif'", "'#else'", 'END_DIRECTIVE') not in ["'#elif'", "'#else'", 'END_DIRECTIVE']:
                    block = self.block(start)
                    _elif_node.append(block)
                make_optional(_last_condition_node.child_nodes)
            if self._peek("'#else'", 'END_DIRECTIVE') == "'#else'":
                self._scan("'#else'")
                CLOSE_DIRECTIVE = self.CLOSE_DIRECTIVE()
                start = CLOSE_DIRECTIVE.endswith('\n')
                while self._peek('START_DIRECTIVE', 'SPACE', 'NEWLINE', 'START_PLACEHOLDER', 'TEXT', 'END_DIRECTIVE') != 'END_DIRECTIVE':
                    block = self.block(start)
                    _last_condition_node.else_.append(block)
                make_optional(_last_condition_node.else_.child_nodes)
            END_DIRECTIVE = self._scan('END_DIRECTIVE')
            SPACE = self._scan('SPACE')
            self._scan("'if'")
            CLOSE_DIRECTIVE = self.CLOSE_DIRECTIVE()
            _node_list.append(_if_node)
        elif _token_ not in ['END', 'START_DIRECTIVE', 'SPACE', 'NEWLINE', 'START_PLACEHOLDER', 'END_DIRECTIVE', "'#elif'", 'TEXT', "'#else'"]:
            statement = self.statement()
            statement.statement = True
            _node_list.append(statement)
        else:
            _node_list.append(TextNode(START_DIRECTIVE))
        return _node_list
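
    # block() dispatches on the next token: directives, literal text,
    # whitespace/newlines (optionally followed by a directive), or a
    # $placeholder, which may carry a |parameter list inside ${...} braces.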

    def block(self, start=False):
        _token_ = self._peek('START_DIRECTIVE', 'SPACE', 'NEWLINE', 'START_PLACEHOLDER', 'TEXT')
        if _token_ == 'START_DIRECTIVE':
            directive = self.directive()
            return directive
        elif _token_ == 'TEXT':
            text = self.text()
            return text
        elif _token_ == 'SPACE':
            SPACE = self._scan('SPACE')
            _node_list = NodeList()
            _node_list.append(WhitespaceNode(SPACE))
            if self._peek('START_DIRECTIVE', 'END', 'SPACE', 'NEWLINE', 'START_PLACEHOLDER', 'END_DIRECTIVE', "'#elif'", 'TEXT', "'#else'") == 'START_DIRECTIVE':
                directive = self.directive()
                if start: _node_list[-1] = OptionalWhitespaceNode(SPACE)
                _node_list.append(directive)
            return _node_list
        elif _token_ == 'NEWLINE':
            NEWLINE = self._scan('NEWLINE')
            _node_list = NodeList()
            _node_list.append(NewlineNode(NEWLINE))
            if self._peek('SPACE', 'END', 'START_DIRECTIVE', 'NEWLINE', 'START_PLACEHOLDER', 'END_DIRECTIVE', "'#elif'", 'TEXT', "'#else'") == 'SPACE':
                SPACE = self._scan('SPACE')
                _node_list.append(WhitespaceNode(SPACE))
                if self._peek('START_DIRECTIVE', 'END', 'SPACE', 'NEWLINE', 'START_PLACEHOLDER', 'END_DIRECTIVE', "'#elif'", 'TEXT', "'#else'") == 'START_DIRECTIVE':
                    directive = self.directive()
                    _node_list[-1] = OptionalWhitespaceNode(SPACE)
                    _node_list.append(directive)
            return _node_list
        else:  # == 'START_PLACEHOLDER'
            _parameter_list = None
            START_PLACEHOLDER = self._scan('START_PLACEHOLDER')
            _primary = TextNode(START_PLACEHOLDER)
            if self._peek('PLACEHOLDER_OPEN_BRACE', 'ID', 'END', 'START_DIRECTIVE', 'SPACE', 'NEWLINE', 'START_PLACEHOLDER', 'END_DIRECTIVE', "'#elif'", 'TEXT', "'#else'") in ['PLACEHOLDER_OPEN_BRACE', 'ID']:
                _token_ = self._peek('PLACEHOLDER_OPEN_BRACE', 'ID')
                if _token_ == 'PLACEHOLDER_OPEN_BRACE':
                    PLACEHOLDER_OPEN_BRACE = self._scan('PLACEHOLDER_OPEN_BRACE')
                    placeholder_in_text = self.placeholder_in_text()
                    _primary = placeholder_in_text
                    if self._peek('PIPE', 'PLACEHOLDER_CLOSE_BRACE') == 'PIPE':
                        PIPE = self._scan('PIPE')
                        placeholder_parameter_list = self.placeholder_parameter_list()
                        _parameter_list = placeholder_parameter_list
                    PLACEHOLDER_CLOSE_BRACE = self._scan('PLACEHOLDER_CLOSE_BRACE')
                else:  # == 'ID'
                    placeholder_in_text = self.placeholder_in_text()
                    _primary = placeholder_in_text
            if type(_primary) != TextNode: return PlaceholderSubstitutionNode(_primary, _parameter_list)
            return _primary

    def text_or_placeholders(self, start=False):
        _token_ = self._peek('START_DIRECTIVE', 'SPACE', 'NEWLINE', 'START_PLACEHOLDER', 'TEXT')
        if _token_ == 'START_DIRECTIVE':
            START_DIRECTIVE = self._scan('START_DIRECTIVE')
            return TextNode(START_DIRECTIVE)
        elif _token_ == 'TEXT':
            text = self.text()
            return text
        elif _token_ == 'SPACE':
            SPACE = self._scan('SPACE')
            _node_list = NodeList()
            _node_list.append(WhitespaceNode(SPACE))
            return _node_list
        elif _token_ == 'NEWLINE':
            NEWLINE = self._scan('NEWLINE')
            _node_list = NodeList()
            _node_list.append(NewlineNode(NEWLINE))
            if self._peek('SPACE', 'END', 'START_DIRECTIVE', 'NEWLINE', 'START_PLACEHOLDER', 'TEXT') == 'SPACE':
                SPACE = self._scan('SPACE')
                _node_list.append(WhitespaceNode(SPACE))
            return _node_list
        else:  # == 'START_PLACEHOLDER'
            _parameter_list = None
            START_PLACEHOLDER = self._scan('START_PLACEHOLDER')
            _primary = TextNode(START_PLACEHOLDER)
            if self._peek('PLACEHOLDER_OPEN_BRACE', 'ID', 'END', 'START_DIRECTIVE', 'SPACE', 'NEWLINE', 'START_PLACEHOLDER', 'TEXT') in ['PLACEHOLDER_OPEN_BRACE', 'ID']:
                _token_ = self._peek('PLACEHOLDER_OPEN_BRACE', 'ID')
                if _token_ == 'PLACEHOLDER_OPEN_BRACE':
                    PLACEHOLDER_OPEN_BRACE = self._scan('PLACEHOLDER_OPEN_BRACE')
                    placeholder_in_text = self.placeholder_in_text()
                    _primary = placeholder_in_text
                    if self._peek('PIPE', 'PLACEHOLDER_CLOSE_BRACE') == 'PIPE':
                        PIPE = self._scan('PIPE')
                        placeholder_parameter_list = self.placeholder_parameter_list()
                        _parameter_list = placeholder_parameter_list
                    PLACEHOLDER_CLOSE_BRACE = self._scan('PLACEHOLDER_CLOSE_BRACE')
                else:  # == 'ID'
                    placeholder_in_text = self.placeholder_in_text()
                    _primary = placeholder_in_text
            if type(_primary) == TextNode: return _primary
            _placeholder_sub = PlaceholderSubstitutionNode(_primary, _parameter_list)
            return _placeholder_sub

    def text(self):
        TEXT = self._scan('TEXT')
        return TextNode(TEXT)
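
    # A placeholder such as $foo.bar(x)[0] is parsed as a PlaceholderNode
    # followed by suffix expressions: attribute access (GetUDNNode), calls
    # (CallFunctionNode) and subscripts (SliceNode).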

    def placeholder_in_text(self):
        ID = self._scan('ID')
        _primary = PlaceholderNode(ID)
        while self._peek('DOT', 'PLACEHOLDER_OPEN_PAREN', 'OPEN_BRACKET', 'PIPE', 'PLACEHOLDER_CLOSE_BRACE', 'END', 'START_DIRECTIVE', 'SPACE', 'NEWLINE', 'START_PLACEHOLDER', 'END_DIRECTIVE', "'#elif'", 'TEXT', "'#else'") in ['DOT', 'PLACEHOLDER_OPEN_PAREN', 'OPEN_BRACKET']:
            placeholder_suffix_expression = self.placeholder_suffix_expression(_primary)
            _primary = placeholder_suffix_expression
        return _primary

    def placeholder_suffix_expression(self, _previous_primary):
        _token_ = self._peek('DOT', 'PLACEHOLDER_OPEN_PAREN', 'OPEN_BRACKET')
        if _token_ == 'DOT':
            DOT = self._scan('DOT')
            ID = self._scan('ID')
            _primary = GetUDNNode(_previous_primary, ID)
        elif _token_ == 'PLACEHOLDER_OPEN_PAREN':
            PLACEHOLDER_OPEN_PAREN = self._scan('PLACEHOLDER_OPEN_PAREN')
            _arg_list = None
            if self._peek('CLOSE_PAREN', '"[ \\t]*not[ \\t]*"', 'START_PLACEHOLDER', 'ID', '"True"', '"False"', '\'"\'', '"\'"', 'NUM', 'OPEN_BRACKET', 'OPEN_PAREN', 'OPEN_BRACE', "'[ \\t]*\\-[ \\t]*'") != 'CLOSE_PAREN':
                argument_list = self.argument_list()
                _arg_list = argument_list
            CLOSE_PAREN = self._scan('CLOSE_PAREN')
            _primary = CallFunctionNode(_previous_primary, _arg_list)
        else:  # == 'OPEN_BRACKET'
            OPEN_BRACKET = self._scan('OPEN_BRACKET')
            expression = self.expression()
            _primary = SliceNode(_previous_primary, expression)
            CLOSE_BRACKET = self._scan('CLOSE_BRACKET')
        return _primary

    def placeholder(self):
        START_PLACEHOLDER = self._scan('START_PLACEHOLDER')
        _token_ = self._peek('ID')
        if _token_ == 'ID': return PlaceholderNode(self._scan('ID'))
        return TextNode(START_PLACEHOLDER)

    def target_list(self):
        _target_list = TargetListNode()
        target = self.target()
        _target_list.append(target)
        while self._peek('COMMA_DELIMITER', "'[ \\t]*in[ \\t]*'", 'CLOSE_PAREN', 'CLOSE_BRACKET') == 'COMMA_DELIMITER':
            COMMA_DELIMITER = self._scan('COMMA_DELIMITER')
            target = self.target()
            _target_list.append(target)
        return _target_list

    def expression_list(self):
        _expression_list = ExpressionListNode()
        expression = self.expression()
        _expression_list.append(expression)
        while self._peek('COMMA_DELIMITER', 'SPACE', 'CLOSE_DIRECTIVE_TOKEN') == 'COMMA_DELIMITER':
            COMMA_DELIMITER = self._scan('COMMA_DELIMITER')
            expression = self.expression()
            _expression_list.append(expression)
        return _expression_list

    def target(self):
        _token_ = self._peek('START_PLACEHOLDER', 'OPEN_PAREN', 'OPEN_BRACKET')
        if _token_ == 'START_PLACEHOLDER':
            placeholder = self.placeholder()
            return TargetNode(placeholder.name)
        elif _token_ == 'OPEN_PAREN':
            OPEN_PAREN = self._scan('OPEN_PAREN')
            target_list = self.target_list()
            CLOSE_PAREN = self._scan('CLOSE_PAREN')
            return target_list
        else:  # == 'OPEN_BRACKET'
            OPEN_BRACKET = self._scan('OPEN_BRACKET')
            target_list = self.target_list()
            CLOSE_BRACKET = self._scan('CLOSE_BRACKET')
            return target_list

    def parameter(self):
        placeholder = self.placeholder()
        _node = ParameterNode(placeholder.name)
        if self._peek('ASSIGN_OPERATOR', 'COMMA_DELIMITER', 'CLOSE_PAREN') == 'ASSIGN_OPERATOR':
            ASSIGN_OPERATOR = self._scan('ASSIGN_OPERATOR')
            expression = self.expression()
            _node.default = expression
        return _node

    def parameter_list(self):
        _parameter_list = ParameterListNode()
        parameter = self.parameter()
        _parameter_list.append(parameter)
        while self._peek('COMMA_DELIMITER', 'CLOSE_PAREN') == 'COMMA_DELIMITER':
            COMMA_DELIMITER = self._scan('COMMA_DELIMITER')
            parameter = self.parameter()
            _parameter_list.append(parameter)
        return _parameter_list

    def macro_parameter(self):
        placeholder = self.placeholder()
        _node = ParameterNode(placeholder.name)
        if self._peek('ASSIGN_OPERATOR', 'COMMA_DELIMITER', 'CLOSE_PAREN') == 'ASSIGN_OPERATOR':
            ASSIGN_OPERATOR = self._scan('ASSIGN_OPERATOR')
            literal = self.literal()
            _node.default = literal
        return _node

    def macro_parameter_list(self):
        _parameter_list = ParameterListNode()
        macro_parameter = self.macro_parameter()
        _parameter_list.append(macro_parameter)
        while self._peek('COMMA_DELIMITER', 'CLOSE_PAREN') == 'COMMA_DELIMITER':
            COMMA_DELIMITER = self._scan('COMMA_DELIMITER')
            macro_parameter = self.macro_parameter()
            _parameter_list.append(macro_parameter)
        return _parameter_list

    def literal_or_identifier(self):
        _token_ = self._peek('"True"', '"False"', '\'"\'', '"\'"', 'NUM', 'ID')
        if _token_ != 'ID':
            literal = self.literal()
            return literal
        else:  # == 'ID'
            identifier = self.identifier()
            return identifier

    def placeholder_parameter(self):
        identifier = self.identifier()
        _node = ParameterNode(identifier.name)
        if self._peek('ASSIGN_OPERATOR', 'COMMA_DELIMITER', 'PLACEHOLDER_CLOSE_BRACE') == 'ASSIGN_OPERATOR':
            ASSIGN_OPERATOR = self._scan('ASSIGN_OPERATOR')
            literal_or_identifier = self.literal_or_identifier()
            _node.default = literal_or_identifier
        return _node

    def placeholder_parameter_list(self):
        _parameter_list = ParameterListNode()
        placeholder_parameter = self.placeholder_parameter()
        _parameter_list.append(placeholder_parameter)
        while self._peek('COMMA_DELIMITER', 'PLACEHOLDER_CLOSE_BRACE') == 'COMMA_DELIMITER':
            COMMA_DELIMITER = self._scan('COMMA_DELIMITER')
            placeholder_parameter = self.placeholder_parameter()
            _parameter_list.append(placeholder_parameter)
        return _parameter_list

    def stringliteral(self):
        _token_ = self._peek('\'"\'', '"\'"')
        if _token_ == '\'"\'':
            self._scan('\'"\'')
            DOUBLE_QUOTE_STR = self._scan('DOUBLE_QUOTE_STR')
            self._scan('\'"\'')
            return unicode(eval('"%s"' % DOUBLE_QUOTE_STR))
        else:  # == '"\'"'
            self._scan('"\'"')
            SINGLE_QUOTE_STR = self._scan('SINGLE_QUOTE_STR')
            self._scan('"\'"')
            return unicode(eval("'%s'" % SINGLE_QUOTE_STR))

    def literal(self):
        _token_ = self._peek('"True"', '"False"', '\'"\'', '"\'"', 'NUM')
        if _token_ == '"True"':
            self._scan('"True"')
            return LiteralNode(True)
        elif _token_ == '"False"':
            self._scan('"False"')
            return LiteralNode(False)
        elif _token_ != 'NUM':
            stringliteral = self.stringliteral()
            return LiteralNode(stringliteral)
        else:  # == 'NUM'
            NUM = self._scan('NUM')
            int_part = NUM
            if self._peek('"\\."', 'SPACE', 'CLOSE_DIRECTIVE_TOKEN', 'DOT', 'PLACEHOLDER_OPEN_PAREN', 'OPEN_BRACKET', 'COMMA_DELIMITER', "'[ \\t]*\\*[ \\t]*'", 'CLOSE_PAREN', "'[ \\t]*\\/[ \\t]*'", "'[ \\t]*\\%[ \\t]*'", 'PLACEHOLDER_CLOSE_BRACE', "'[ \\t]*\\+[ \\t]*'", "'[ \\t]*\\-[ \\t]*'", 'COMP_OPERATOR', "'[ \\t]*and[ \\t]*'", "'[ \\t]*or[ \\t]*'", 'END', 'COLON_DELIMITER', 'CLOSE_BRACKET', 'ASSIGN_OPERATOR', 'CLOSE_BRACE') == '"\\."':
                self._scan('"\\."')
                NUM = self._scan('NUM')
                return LiteralNode(float('%s.%s' % (int_part, NUM)))
            return LiteralNode(int(int_part))

    def identifier(self):
        ID = self._scan('ID')
        return IdentifierNode(ID)

    def primary(self, in_placeholder_context=False):
        _token_ = self._peek('START_PLACEHOLDER', 'ID', '"True"', '"False"', '\'"\'', '"\'"', 'NUM', 'OPEN_BRACKET', 'OPEN_PAREN', 'OPEN_BRACE')
        if _token_ == 'START_PLACEHOLDER':
            placeholder = self.placeholder()
            _primary = placeholder
        elif _token_ == 'ID':
            identifier = self.identifier()
            _primary = identifier
            if in_placeholder_context: _primary = PlaceholderNode(_primary.name)
        elif _token_ not in ['OPEN_BRACKET', 'OPEN_PAREN', 'OPEN_BRACE']:
            literal = self.literal()
            _primary = literal
        elif _token_ == 'OPEN_BRACKET':
            OPEN_BRACKET = self._scan('OPEN_BRACKET')
            _list_literal = ListLiteralNode()
            if self._peek('COMMA_DELIMITER', 'CLOSE_BRACKET', '"[ \\t]*not[ \\t]*"', 'START_PLACEHOLDER', 'ID', '"True"', '"False"', '\'"\'', '"\'"', 'NUM', 'OPEN_BRACKET', 'OPEN_PAREN', 'OPEN_BRACE', "'[ \\t]*\\-[ \\t]*'") not in ['COMMA_DELIMITER', 'CLOSE_BRACKET']:
                expression = self.expression()
                _list_literal.append(expression)
            while self._peek('COMMA_DELIMITER', 'CLOSE_BRACKET') == 'COMMA_DELIMITER':
                COMMA_DELIMITER = self._scan('COMMA_DELIMITER')
                expression = self.expression()
                _list_literal.append(expression)
            CLOSE_BRACKET = self._scan('CLOSE_BRACKET')
            _primary = _list_literal
        elif _token_ == 'OPEN_PAREN':
            OPEN_PAREN = self._scan('OPEN_PAREN')
            _tuple_literal = TupleLiteralNode()
            if self._peek('COMMA_DELIMITER', 'CLOSE_PAREN', '"[ \\t]*not[ \\t]*"', 'START_PLACEHOLDER', 'ID', '"True"', '"False"', '\'"\'', '"\'"', 'NUM', 'OPEN_BRACKET', 'OPEN_PAREN', 'OPEN_BRACE', "'[ \\t]*\\-[ \\t]*'") not in ['COMMA_DELIMITER', 'CLOSE_PAREN']:
                expression = self.expression()
                _tuple_literal.append(expression)
            while self._peek('COMMA_DELIMITER', 'CLOSE_PAREN') == 'COMMA_DELIMITER':
                COMMA_DELIMITER = self._scan('COMMA_DELIMITER')
                expression = self.expression()
                _tuple_literal.append(expression)
            CLOSE_PAREN = self._scan('CLOSE_PAREN')
            _primary = _tuple_literal
        else:  # == 'OPEN_BRACE'
            OPEN_BRACE = self._scan('OPEN_BRACE')
            _dict_literal = DictLiteralNode()
            if self._peek('COMMA_DELIMITER', 'CLOSE_BRACE', 'PLACEHOLDER_CLOSE_BRACE', '"[ \\t]*not[ \\t]*"', 'START_PLACEHOLDER', 'ID', '"True"', '"False"', '\'"\'', '"\'"', 'NUM', 'OPEN_BRACKET', 'OPEN_PAREN', 'OPEN_BRACE', "'[ \\t]*\\-[ \\t]*'") not in ['COMMA_DELIMITER', 'CLOSE_BRACE', 'PLACEHOLDER_CLOSE_BRACE']:
                expression = self.expression()
                _key = expression
                COLON_DELIMITER = self._scan('COLON_DELIMITER')
                expression = self.expression()
                _dict_literal.append((_key, expression))
            while self._peek('COMMA_DELIMITER', 'CLOSE_BRACE', 'PLACEHOLDER_CLOSE_BRACE') == 'COMMA_DELIMITER':
                COMMA_DELIMITER = self._scan('COMMA_DELIMITER')
                expression = self.expression()
                _key = expression
                COLON_DELIMITER = self._scan('COLON_DELIMITER')
                expression = self.expression()
                _dict_literal.append((_key, expression))
            _token_ = self._peek('CLOSE_BRACE', 'PLACEHOLDER_CLOSE_BRACE')
            if _token_ == 'CLOSE_BRACE':
                CLOSE_BRACE = self._scan('CLOSE_BRACE')
            else:  # == 'PLACEHOLDER_CLOSE_BRACE'
                PLACEHOLDER_CLOSE_BRACE = self._scan('PLACEHOLDER_CLOSE_BRACE')
            _primary = _dict_literal
        while self._peek('DOT', 'PLACEHOLDER_OPEN_PAREN', 'OPEN_BRACKET', "'[ \\t]*\\*[ \\t]*'", "'[ \\t]*\\/[ \\t]*'", "'[ \\t]*\\%[ \\t]*'", "'[ \\t]*\\+[ \\t]*'", "'[ \\t]*\\-[ \\t]*'", 'COMP_OPERATOR', "'[ \\t]*and[ \\t]*'", "'[ \\t]*or[ \\t]*'", 'SPACE', 'CLOSE_DIRECTIVE_TOKEN', 'END', 'COMMA_DELIMITER', 'COLON_DELIMITER', 'CLOSE_BRACKET', 'ASSIGN_OPERATOR', 'CLOSE_PAREN', 'CLOSE_BRACE', 'PLACEHOLDER_CLOSE_BRACE') in ['DOT', 'PLACEHOLDER_OPEN_PAREN', 'OPEN_BRACKET']:
            placeholder_suffix_expression = self.placeholder_suffix_expression(_primary)
            _primary = placeholder_suffix_expression
        return _primary

    def define_list(self):
        argument_list = self.argument_list()
        END = self._scan('END')
        return argument_list

    def rhs_expression(self):
        expression = self.expression()
        END = self._scan('END')
        return expression

    def argument_list(self):
        _pargs, _kargs = [], []
        expression = self.expression()
        _arg = expression
        while self._peek('COMMA_DELIMITER', 'ASSIGN_OPERATOR', 'END', 'CLOSE_PAREN') == 'COMMA_DELIMITER':
            COMMA_DELIMITER = self._scan('COMMA_DELIMITER')
            _pargs.append(_arg)
            expression = self.expression()
            _arg = expression
        if self._peek('ASSIGN_OPERATOR', 'COMMA_DELIMITER', 'END', 'CLOSE_PAREN') == 'ASSIGN_OPERATOR':
            ASSIGN_OPERATOR = self._scan('ASSIGN_OPERATOR')
            if not isinstance(_arg, (IdentifierNode)): raise SyntaxError(self._scanner.pos, "keyword arg can't be complex expression: %s" % _arg)
            _karg = ParameterNode(_arg.name)
            _arg = None
            expression = self.expression()
            _karg.default = expression
            _kargs.append(_karg)
            while self._peek('COMMA_DELIMITER', 'END', 'CLOSE_PAREN') == 'COMMA_DELIMITER':
                COMMA_DELIMITER = self._scan('COMMA_DELIMITER')
                identifier = self.identifier()
                _karg = ParameterNode(identifier.name)
                ASSIGN_OPERATOR = self._scan('ASSIGN_OPERATOR')
                expression = self.expression()
                _karg.default = expression
                _kargs.append(_karg)
        if _arg: _pargs.append(_arg)
        return ArgListNode(_pargs, _kargs)
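
    # Expression grammar with the usual precedence chain:
    # expression -> or_test -> and_test -> not_test -> comparison
    #            -> a_expr (+, -) -> m_expr (*, /, %) -> u_expr (unary -)
    #            -> primary.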

    def expression(self):
        or_test = self.or_test()
        return or_test

    def or_test(self):
        and_test = self.and_test()
        _test = and_test
        while self._peek("'[ \\t]*or[ \\t]*'", 'SPACE', 'CLOSE_DIRECTIVE_TOKEN', 'END', 'COMMA_DELIMITER', 'COLON_DELIMITER', 'CLOSE_BRACKET', 'ASSIGN_OPERATOR', 'CLOSE_PAREN', 'CLOSE_BRACE', 'PLACEHOLDER_CLOSE_BRACE') == "'[ \\t]*or[ \\t]*'":
            self._scan("'[ \\t]*or[ \\t]*'")
            and_test = self.and_test()
            _test = BinOpExpressionNode('or', _test, and_test)
        return _test

    def and_test(self):
        not_test = self.not_test()
        _test = not_test
        while self._peek("'[ \\t]*and[ \\t]*'", "'[ \\t]*or[ \\t]*'", 'SPACE', 'CLOSE_DIRECTIVE_TOKEN', 'END', 'COMMA_DELIMITER', 'COLON_DELIMITER', 'CLOSE_BRACKET', 'ASSIGN_OPERATOR', 'CLOSE_PAREN', 'CLOSE_BRACE', 'PLACEHOLDER_CLOSE_BRACE') == "'[ \\t]*and[ \\t]*'":
            self._scan("'[ \\t]*and[ \\t]*'")
            not_test = self.not_test()
            _test = BinOpExpressionNode('and', _test, not_test)
        return _test

    def not_test(self):
        _token_ = self._peek('"[ \\t]*not[ \\t]*"', 'START_PLACEHOLDER', 'ID', '"True"', '"False"', '\'"\'', '"\'"', 'NUM', 'OPEN_BRACKET', 'OPEN_PAREN', 'OPEN_BRACE', "'[ \\t]*\\-[ \\t]*'")
        if _token_ != '"[ \\t]*not[ \\t]*"':
            comparison = self.comparison()
            return comparison
        else:  # == '"[ \\t]*not[ \\t]*"'
            self._scan('"[ \\t]*not[ \\t]*"')
            not_test = self.not_test()
            return UnaryOpNode('not', not_test)

    def u_expr(self):
        _token_ = self._peek('START_PLACEHOLDER', 'ID', '"True"', '"False"', '\'"\'', '"\'"', 'NUM', 'OPEN_BRACKET', 'OPEN_PAREN', 'OPEN_BRACE', "'[ \\t]*\\-[ \\t]*'")
        if _token_ != "'[ \\t]*\\-[ \\t]*'":
            primary = self.primary()
            return primary
        else:  # == "'[ \\t]*\\-[ \\t]*'"
            self._scan("'[ \\t]*\\-[ \\t]*'")
            u_expr = self.u_expr()
            return UnaryOpNode('-', u_expr)

    def m_expr(self):
        u_expr = self.u_expr()
        _expr = u_expr
        while self._peek("'[ \\t]*\\*[ \\t]*'", "'[ \\t]*\\/[ \\t]*'", "'[ \\t]*\\%[ \\t]*'", "'[ \\t]*\\+[ \\t]*'", "'[ \\t]*\\-[ \\t]*'", 'COMP_OPERATOR', "'[ \\t]*and[ \\t]*'", "'[ \\t]*or[ \\t]*'", 'SPACE', 'CLOSE_DIRECTIVE_TOKEN', 'END', 'COMMA_DELIMITER', 'COLON_DELIMITER', 'CLOSE_BRACKET', 'ASSIGN_OPERATOR', 'CLOSE_PAREN', 'CLOSE_BRACE', 'PLACEHOLDER_CLOSE_BRACE') == "'[ \\t]*\\*[ \\t]*'":
            self._scan("'[ \\t]*\\*[ \\t]*'")
            u_expr = self.u_expr()
            _expr = BinOpExpressionNode('*', _expr, u_expr)
        while self._peek("'[ \\t]*\\/[ \\t]*'", "'[ \\t]*\\%[ \\t]*'", "'[ \\t]*\\+[ \\t]*'", "'[ \\t]*\\-[ \\t]*'", 'COMP_OPERATOR', "'[ \\t]*and[ \\t]*'", "'[ \\t]*or[ \\t]*'", 'SPACE', 'CLOSE_DIRECTIVE_TOKEN', 'END', 'COMMA_DELIMITER', 'COLON_DELIMITER', 'CLOSE_BRACKET', 'ASSIGN_OPERATOR', 'CLOSE_PAREN', 'CLOSE_BRACE', 'PLACEHOLDER_CLOSE_BRACE') == "'[ \\t]*\\/[ \\t]*'":
            self._scan("'[ \\t]*\\/[ \\t]*'")
            u_expr = self.u_expr()
            _expr = BinOpExpressionNode('/', _expr, u_expr)
        while self._peek("'[ \\t]*\\%[ \\t]*'", "'[ \\t]*\\+[ \\t]*'", "'[ \\t]*\\-[ \\t]*'", 'COMP_OPERATOR', "'[ \\t]*and[ \\t]*'", "'[ \\t]*or[ \\t]*'", 'SPACE', 'CLOSE_DIRECTIVE_TOKEN', 'END', 'COMMA_DELIMITER', 'COLON_DELIMITER', 'CLOSE_BRACKET', 'ASSIGN_OPERATOR', 'CLOSE_PAREN', 'CLOSE_BRACE', 'PLACEHOLDER_CLOSE_BRACE') == "'[ \\t]*\\%[ \\t]*'":
            self._scan("'[ \\t]*\\%[ \\t]*'")
            u_expr = self.u_expr()
            _expr = BinOpExpressionNode('%', _expr, u_expr)
        return _expr

    def a_expr(self):
        m_expr = self.m_expr()
        _expr = m_expr
        while self._peek("'[ \\t]*\\+[ \\t]*'", "'[ \\t]*\\-[ \\t]*'", 'COMP_OPERATOR', "'[ \\t]*and[ \\t]*'", "'[ \\t]*or[ \\t]*'", 'SPACE', 'CLOSE_DIRECTIVE_TOKEN', 'END', 'COMMA_DELIMITER', 'COLON_DELIMITER', 'CLOSE_BRACKET', 'ASSIGN_OPERATOR', 'CLOSE_PAREN', 'CLOSE_BRACE', 'PLACEHOLDER_CLOSE_BRACE') == "'[ \\t]*\\+[ \\t]*'":
            self._scan("'[ \\t]*\\+[ \\t]*'")
            m_expr = self.m_expr()
            _expr = BinOpExpressionNode('+', _expr, m_expr)
        while self._peek("'[ \\t]*\\-[ \\t]*'", 'COMP_OPERATOR', "'[ \\t]*and[ \\t]*'", "'[ \\t]*or[ \\t]*'", 'SPACE', 'CLOSE_DIRECTIVE_TOKEN', 'END', 'COMMA_DELIMITER', 'COLON_DELIMITER', 'CLOSE_BRACKET', 'ASSIGN_OPERATOR', 'CLOSE_PAREN', 'CLOSE_BRACE', 'PLACEHOLDER_CLOSE_BRACE') == "'[ \\t]*\\-[ \\t]*'":
            self._scan("'[ \\t]*\\-[ \\t]*'")
            m_expr = self.m_expr()
            _expr = BinOpExpressionNode('-', _expr, m_expr)
        return _expr

    def comparison(self):
        a_expr = self.a_expr()
        _left_side = a_expr
        while self._peek('COMP_OPERATOR', "'[ \\t]*and[ \\t]*'", "'[ \\t]*or[ \\t]*'", 'SPACE', 'CLOSE_DIRECTIVE_TOKEN', 'END', 'COMMA_DELIMITER', 'COLON_DELIMITER', 'CLOSE_BRACKET', 'ASSIGN_OPERATOR', 'CLOSE_PAREN', 'CLOSE_BRACE', 'PLACEHOLDER_CLOSE_BRACE') == 'COMP_OPERATOR':
            COMP_OPERATOR = self._scan('COMP_OPERATOR')
            a_expr = self.a_expr()
            _left_side = BinOpExpressionNode(COMP_OPERATOR.strip(), _left_side, a_expr)
        return _left_side

def parse(rule, text):
    P = SpitfireParser(SpitfireParserScanner(text))
    return wrap_error_reporter(P, rule)
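
# Illustrative use from Python code (a sketch; the template text below is just
# an example of the directive/placeholder syntax the grammar above accepts):
#
#   template_ast = parse('goal', '#def greet\nHello $name\n#end def\n')
#
# 'goal' is the top-level rule; other rule names defined on SpitfireParser
# (e.g. 'fragment_goal', 'rhs_expression') can be passed instead.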

if __name__ == '__main__':
    from sys import argv, stdin
    if len(argv) >= 2:
        if len(argv) >= 3:
            f = open(argv[2], 'r')
        else:
            f = stdin
        print parse(argv[1], f.read())
    else:
        print 'Args: <rule> [<filename>]'