sam_consensus_v3: comparison of env/lib/python3.9/site-packages/jinja2/parser.py @ 0:4f3585e2f14b (draft default tip)
"planemo upload commit 60cee0fc7c0cda8592644e1aad72851dec82c959"
| author | shellac |
|---|---|
| date | Mon, 22 Mar 2021 18:12:50 +0000 |
| parents | |
| children | |
| old revision | new revision |
|---|---|
| -1:000000000000 | 0:4f3585e2f14b |
```python
# -*- coding: utf-8 -*-
"""Parse tokens from the lexer into nodes for the compiler."""
from . import nodes
from ._compat import imap
from .exceptions import TemplateAssertionError
from .exceptions import TemplateSyntaxError
from .lexer import describe_token
from .lexer import describe_token_expr

_statement_keywords = frozenset(
    [
        "for",
        "if",
        "block",
        "extends",
        "print",
        "macro",
        "include",
        "from",
        "import",
        "set",
        "with",
        "autoescape",
    ]
)
_compare_operators = frozenset(["eq", "ne", "lt", "lteq", "gt", "gteq"])

_math_nodes = {
    "add": nodes.Add,
    "sub": nodes.Sub,
    "mul": nodes.Mul,
    "div": nodes.Div,
    "floordiv": nodes.FloorDiv,
    "mod": nodes.Mod,
}


class Parser(object):
    """This is the central parsing class Jinja uses. It's passed to
    extensions and can be used to parse expressions or statements.
    """

    def __init__(self, environment, source, name=None, filename=None, state=None):
        self.environment = environment
        self.stream = environment._tokenize(source, name, filename, state)
        self.name = name
        self.filename = filename
        self.closed = False
        self.extensions = {}
        for extension in environment.iter_extensions():
            for tag in extension.tags:
                self.extensions[tag] = extension.parse
        self._last_identifier = 0
        self._tag_stack = []
        self._end_token_stack = []

    def fail(self, msg, lineno=None, exc=TemplateSyntaxError):
        """Convenience method that raises `exc` with the message, the passed
        line number or last line number, as well as the current name and
        filename.
        """
        if lineno is None:
            lineno = self.stream.current.lineno
        raise exc(msg, lineno, self.name, self.filename)

    def _fail_ut_eof(self, name, end_token_stack, lineno):
        expected = []
        for exprs in end_token_stack:
            expected.extend(imap(describe_token_expr, exprs))
        if end_token_stack:
            currently_looking = " or ".join(
                "'%s'" % describe_token_expr(expr) for expr in end_token_stack[-1]
            )
        else:
            currently_looking = None

        if name is None:
            message = ["Unexpected end of template."]
        else:
            message = ["Encountered unknown tag '%s'." % name]

        if currently_looking:
            if name is not None and name in expected:
                message.append(
                    "You probably made a nesting mistake. Jinja "
                    "is expecting this tag, but currently looking "
                    "for %s." % currently_looking
                )
            else:
                message.append(
                    "Jinja was looking for the following tags: "
                    "%s." % currently_looking
                )

        if self._tag_stack:
            message.append(
                "The innermost block that needs to be "
                "closed is '%s'." % self._tag_stack[-1]
            )

        self.fail(" ".join(message), lineno)

    def fail_unknown_tag(self, name, lineno=None):
        """Called if the parser encounters an unknown tag. Tries to fail
        with a human-readable error message that could help to identify
        the problem.
        """
        return self._fail_ut_eof(name, self._end_token_stack, lineno)

    def fail_eof(self, end_tokens=None, lineno=None):
        """Like fail_unknown_tag but for end of template situations."""
        stack = list(self._end_token_stack)
        if end_tokens is not None:
            stack.append(end_tokens)
        return self._fail_ut_eof(None, stack, lineno)

    def is_tuple_end(self, extra_end_rules=None):
        """Are we at the end of a tuple?"""
        if self.stream.current.type in ("variable_end", "block_end", "rparen"):
            return True
        elif extra_end_rules is not None:
            return self.stream.current.test_any(extra_end_rules)
        return False

    def free_identifier(self, lineno=None):
        """Return a new free identifier as :class:`~jinja2.nodes.InternalName`."""
        self._last_identifier += 1
        rv = object.__new__(nodes.InternalName)
        nodes.Node.__init__(rv, "fi%d" % self._last_identifier, lineno=lineno)
        return rv

    def parse_statement(self):
        """Parse a single statement."""
        token = self.stream.current
        if token.type != "name":
            self.fail("tag name expected", token.lineno)
        self._tag_stack.append(token.value)
        pop_tag = True
        try:
            if token.value in _statement_keywords:
                return getattr(self, "parse_" + self.stream.current.value)()
            if token.value == "call":
                return self.parse_call_block()
            if token.value == "filter":
                return self.parse_filter_block()
            ext = self.extensions.get(token.value)
            if ext is not None:
                return ext(self)

            # did not work out, remove the token we pushed by accident
            # from the stack so that the unknown tag fail function can
            # produce a proper error message.
            self._tag_stack.pop()
            pop_tag = False
            self.fail_unknown_tag(token.value, token.lineno)
        finally:
            if pop_tag:
                self._tag_stack.pop()

    def parse_statements(self, end_tokens, drop_needle=False):
        """Parse multiple statements into a list until one of the end tokens
        is reached. This is used to parse the body of statements as it also
        parses template data if appropriate. The parser checks first if the
        current token is a colon and skips it if there is one. Then it checks
        for the block end and parses until one of the `end_tokens` is
        reached. By default the active token in the stream at the end of
        the call is the matched end token. If this is not wanted `drop_needle`
        can be set to `True` and the end token is removed.
        """
        # the first token may be a colon for python compatibility
        self.stream.skip_if("colon")

        # in the future it would be possible to add whole code sections
        # by adding some sort of end of statement token and parsing those here.
        self.stream.expect("block_end")
        result = self.subparse(end_tokens)

        # we reached the end of the template too early, the subparser
        # does not check for this, so we do that now
        if self.stream.current.type == "eof":
            self.fail_eof(end_tokens)

        if drop_needle:
            next(self.stream)
        return result

    def parse_set(self):
        """Parse an assign statement."""
        lineno = next(self.stream).lineno
        target = self.parse_assign_target(with_namespace=True)
        if self.stream.skip_if("assign"):
            expr = self.parse_tuple()
            return nodes.Assign(target, expr, lineno=lineno)
        filter_node = self.parse_filter(None)
        body = self.parse_statements(("name:endset",), drop_needle=True)
        return nodes.AssignBlock(target, filter_node, body, lineno=lineno)

    def parse_for(self):
        """Parse a for loop."""
        lineno = self.stream.expect("name:for").lineno
        target = self.parse_assign_target(extra_end_rules=("name:in",))
        self.stream.expect("name:in")
        iter = self.parse_tuple(
            with_condexpr=False, extra_end_rules=("name:recursive",)
        )
        test = None
        if self.stream.skip_if("name:if"):
            test = self.parse_expression()
        recursive = self.stream.skip_if("name:recursive")
        body = self.parse_statements(("name:endfor", "name:else"))
        if next(self.stream).value == "endfor":
            else_ = []
        else:
            else_ = self.parse_statements(("name:endfor",), drop_needle=True)
        return nodes.For(target, iter, body, else_, test, recursive, lineno=lineno)

    def parse_if(self):
        """Parse an if construct."""
        node = result = nodes.If(lineno=self.stream.expect("name:if").lineno)
        while 1:
            node.test = self.parse_tuple(with_condexpr=False)
            node.body = self.parse_statements(("name:elif", "name:else", "name:endif"))
            node.elif_ = []
            node.else_ = []
            token = next(self.stream)
            if token.test("name:elif"):
                node = nodes.If(lineno=self.stream.current.lineno)
                result.elif_.append(node)
                continue
            elif token.test("name:else"):
                result.else_ = self.parse_statements(("name:endif",), drop_needle=True)
            break
        return result

    def parse_with(self):
        node = nodes.With(lineno=next(self.stream).lineno)
        targets = []
        values = []
        while self.stream.current.type != "block_end":
            if targets:
                self.stream.expect("comma")
            target = self.parse_assign_target()
            target.set_ctx("param")
            targets.append(target)
            self.stream.expect("assign")
            values.append(self.parse_expression())
        node.targets = targets
        node.values = values
        node.body = self.parse_statements(("name:endwith",), drop_needle=True)
        return node

    def parse_autoescape(self):
        node = nodes.ScopedEvalContextModifier(lineno=next(self.stream).lineno)
        node.options = [nodes.Keyword("autoescape", self.parse_expression())]
        node.body = self.parse_statements(("name:endautoescape",), drop_needle=True)
        return nodes.Scope([node])

    def parse_block(self):
        node = nodes.Block(lineno=next(self.stream).lineno)
        node.name = self.stream.expect("name").value
        node.scoped = self.stream.skip_if("name:scoped")

        # common problem people encounter when switching from django
        # to jinja. we do not support hyphens in block names, so let's
        # raise a nicer error message in that case.
        if self.stream.current.type == "sub":
            self.fail(
                "Block names in Jinja have to be valid Python "
                "identifiers and may not contain hyphens, use an "
                "underscore instead."
            )

        node.body = self.parse_statements(("name:endblock",), drop_needle=True)
        self.stream.skip_if("name:" + node.name)
        return node

    def parse_extends(self):
        node = nodes.Extends(lineno=next(self.stream).lineno)
        node.template = self.parse_expression()
        return node

    def parse_import_context(self, node, default):
        if self.stream.current.test_any(
            "name:with", "name:without"
        ) and self.stream.look().test("name:context"):
            node.with_context = next(self.stream).value == "with"
            self.stream.skip()
        else:
            node.with_context = default
        return node

    def parse_include(self):
        node = nodes.Include(lineno=next(self.stream).lineno)
        node.template = self.parse_expression()
        if self.stream.current.test("name:ignore") and self.stream.look().test(
            "name:missing"
        ):
            node.ignore_missing = True
            self.stream.skip(2)
        else:
            node.ignore_missing = False
        return self.parse_import_context(node, True)

    def parse_import(self):
        node = nodes.Import(lineno=next(self.stream).lineno)
        node.template = self.parse_expression()
        self.stream.expect("name:as")
        node.target = self.parse_assign_target(name_only=True).name
        return self.parse_import_context(node, False)

    def parse_from(self):
        node = nodes.FromImport(lineno=next(self.stream).lineno)
        node.template = self.parse_expression()
        self.stream.expect("name:import")
        node.names = []

        def parse_context():
            if self.stream.current.value in (
                "with",
                "without",
            ) and self.stream.look().test("name:context"):
                node.with_context = next(self.stream).value == "with"
                self.stream.skip()
                return True
            return False

        while 1:
            if node.names:
                self.stream.expect("comma")
            if self.stream.current.type == "name":
                if parse_context():
                    break
                target = self.parse_assign_target(name_only=True)
                if target.name.startswith("_"):
                    self.fail(
                        "names starting with an underline can not be imported",
                        target.lineno,
                        exc=TemplateAssertionError,
                    )
                if self.stream.skip_if("name:as"):
                    alias = self.parse_assign_target(name_only=True)
                    node.names.append((target.name, alias.name))
                else:
                    node.names.append(target.name)
                if parse_context() or self.stream.current.type != "comma":
                    break
            else:
                self.stream.expect("name")
        if not hasattr(node, "with_context"):
            node.with_context = False
        return node

    def parse_signature(self, node):
        node.args = args = []
        node.defaults = defaults = []
        self.stream.expect("lparen")
        while self.stream.current.type != "rparen":
            if args:
                self.stream.expect("comma")
            arg = self.parse_assign_target(name_only=True)
            arg.set_ctx("param")
            if self.stream.skip_if("assign"):
                defaults.append(self.parse_expression())
            elif defaults:
                self.fail("non-default argument follows default argument")
            args.append(arg)
        self.stream.expect("rparen")

    def parse_call_block(self):
        node = nodes.CallBlock(lineno=next(self.stream).lineno)
        if self.stream.current.type == "lparen":
            self.parse_signature(node)
        else:
            node.args = []
            node.defaults = []

        node.call = self.parse_expression()
        if not isinstance(node.call, nodes.Call):
            self.fail("expected call", node.lineno)
        node.body = self.parse_statements(("name:endcall",), drop_needle=True)
        return node

    def parse_filter_block(self):
        node = nodes.FilterBlock(lineno=next(self.stream).lineno)
        node.filter = self.parse_filter(None, start_inline=True)
        node.body = self.parse_statements(("name:endfilter",), drop_needle=True)
        return node

    def parse_macro(self):
        node = nodes.Macro(lineno=next(self.stream).lineno)
        node.name = self.parse_assign_target(name_only=True).name
        self.parse_signature(node)
        node.body = self.parse_statements(("name:endmacro",), drop_needle=True)
        return node

    def parse_print(self):
        node = nodes.Output(lineno=next(self.stream).lineno)
        node.nodes = []
        while self.stream.current.type != "block_end":
            if node.nodes:
                self.stream.expect("comma")
            node.nodes.append(self.parse_expression())
        return node

    def parse_assign_target(
        self,
        with_tuple=True,
        name_only=False,
        extra_end_rules=None,
        with_namespace=False,
    ):
        """Parse an assignment target. As Jinja allows assignments to
        tuples, this function can parse all allowed assignment targets. By
        default assignments to tuples are parsed; that can be disabled by
        setting `with_tuple` to `False`. If only assignments to names are
        wanted, `name_only` can be set to `True`. The `extra_end_rules`
        parameter is forwarded to the tuple parsing function. If
        `with_namespace` is enabled, a namespace assignment may be parsed.
        """
        if with_namespace and self.stream.look().type == "dot":
            token = self.stream.expect("name")
            next(self.stream)  # dot
            attr = self.stream.expect("name")
            target = nodes.NSRef(token.value, attr.value, lineno=token.lineno)
        elif name_only:
            token = self.stream.expect("name")
            target = nodes.Name(token.value, "store", lineno=token.lineno)
        else:
            if with_tuple:
                target = self.parse_tuple(
                    simplified=True, extra_end_rules=extra_end_rules
                )
            else:
                target = self.parse_primary()
            target.set_ctx("store")
        if not target.can_assign():
            self.fail(
                "can't assign to %r" % target.__class__.__name__.lower(), target.lineno
            )
        return target

    def parse_expression(self, with_condexpr=True):
        """Parse an expression. By default all expressions are parsed; if
        the optional `with_condexpr` parameter is set to `False`, conditional
        expressions are not parsed.
        """
        if with_condexpr:
            return self.parse_condexpr()
        return self.parse_or()

    def parse_condexpr(self):
        lineno = self.stream.current.lineno
        expr1 = self.parse_or()
        while self.stream.skip_if("name:if"):
            expr2 = self.parse_or()
            if self.stream.skip_if("name:else"):
                expr3 = self.parse_condexpr()
            else:
                expr3 = None
            expr1 = nodes.CondExpr(expr2, expr1, expr3, lineno=lineno)
            lineno = self.stream.current.lineno
        return expr1

    def parse_or(self):
        lineno = self.stream.current.lineno
        left = self.parse_and()
        while self.stream.skip_if("name:or"):
            right = self.parse_and()
            left = nodes.Or(left, right, lineno=lineno)
            lineno = self.stream.current.lineno
        return left

    def parse_and(self):
        lineno = self.stream.current.lineno
        left = self.parse_not()
        while self.stream.skip_if("name:and"):
            right = self.parse_not()
            left = nodes.And(left, right, lineno=lineno)
            lineno = self.stream.current.lineno
        return left

    def parse_not(self):
        if self.stream.current.test("name:not"):
            lineno = next(self.stream).lineno
            return nodes.Not(self.parse_not(), lineno=lineno)
        return self.parse_compare()

    def parse_compare(self):
        lineno = self.stream.current.lineno
        expr = self.parse_math1()
        ops = []
        while 1:
            token_type = self.stream.current.type
            if token_type in _compare_operators:
                next(self.stream)
                ops.append(nodes.Operand(token_type, self.parse_math1()))
            elif self.stream.skip_if("name:in"):
                ops.append(nodes.Operand("in", self.parse_math1()))
            elif self.stream.current.test("name:not") and self.stream.look().test(
                "name:in"
            ):
                self.stream.skip(2)
                ops.append(nodes.Operand("notin", self.parse_math1()))
            else:
                break
            lineno = self.stream.current.lineno
        if not ops:
            return expr
        return nodes.Compare(expr, ops, lineno=lineno)

    def parse_math1(self):
        lineno = self.stream.current.lineno
        left = self.parse_concat()
        while self.stream.current.type in ("add", "sub"):
            cls = _math_nodes[self.stream.current.type]
            next(self.stream)
            right = self.parse_concat()
            left = cls(left, right, lineno=lineno)
            lineno = self.stream.current.lineno
        return left

    def parse_concat(self):
        lineno = self.stream.current.lineno
        args = [self.parse_math2()]
        while self.stream.current.type == "tilde":
            next(self.stream)
            args.append(self.parse_math2())
        if len(args) == 1:
            return args[0]
        return nodes.Concat(args, lineno=lineno)

    def parse_math2(self):
        lineno = self.stream.current.lineno
        left = self.parse_pow()
        while self.stream.current.type in ("mul", "div", "floordiv", "mod"):
            cls = _math_nodes[self.stream.current.type]
            next(self.stream)
            right = self.parse_pow()
            left = cls(left, right, lineno=lineno)
            lineno = self.stream.current.lineno
        return left

    def parse_pow(self):
        lineno = self.stream.current.lineno
        left = self.parse_unary()
        while self.stream.current.type == "pow":
            next(self.stream)
            right = self.parse_unary()
            left = nodes.Pow(left, right, lineno=lineno)
            lineno = self.stream.current.lineno
        return left

    def parse_unary(self, with_filter=True):
        token_type = self.stream.current.type
        lineno = self.stream.current.lineno
        if token_type == "sub":
            next(self.stream)
            node = nodes.Neg(self.parse_unary(False), lineno=lineno)
        elif token_type == "add":
            next(self.stream)
            node = nodes.Pos(self.parse_unary(False), lineno=lineno)
        else:
            node = self.parse_primary()
        node = self.parse_postfix(node)
        if with_filter:
            node = self.parse_filter_expr(node)
        return node

    def parse_primary(self):
        token = self.stream.current
        if token.type == "name":
            if token.value in ("true", "false", "True", "False"):
                node = nodes.Const(token.value in ("true", "True"), lineno=token.lineno)
            elif token.value in ("none", "None"):
                node = nodes.Const(None, lineno=token.lineno)
            else:
                node = nodes.Name(token.value, "load", lineno=token.lineno)
            next(self.stream)
        elif token.type == "string":
            next(self.stream)
            buf = [token.value]
            lineno = token.lineno
            while self.stream.current.type == "string":
                buf.append(self.stream.current.value)
                next(self.stream)
            node = nodes.Const("".join(buf), lineno=lineno)
        elif token.type in ("integer", "float"):
            next(self.stream)
            node = nodes.Const(token.value, lineno=token.lineno)
        elif token.type == "lparen":
            next(self.stream)
            node = self.parse_tuple(explicit_parentheses=True)
            self.stream.expect("rparen")
        elif token.type == "lbracket":
            node = self.parse_list()
        elif token.type == "lbrace":
            node = self.parse_dict()
        else:
            self.fail("unexpected '%s'" % describe_token(token), token.lineno)
        return node

    def parse_tuple(
        self,
        simplified=False,
        with_condexpr=True,
        extra_end_rules=None,
        explicit_parentheses=False,
    ):
        """Works like `parse_expression` but if multiple expressions are
        delimited by a comma a :class:`~jinja2.nodes.Tuple` node is created.
        This method may also return a regular expression instead of a tuple
        if no commas were found.

        The default parsing mode is a full tuple. If `simplified` is `True`
        only names and literals are parsed. The `with_condexpr` parameter is
        forwarded to :meth:`parse_expression`.

        Because tuples do not require delimiters and may end in a bogus comma
        an extra hint is needed that marks the end of a tuple. For example
        for loops support tuples between `for` and `in`. In that case the
        `extra_end_rules` is set to ``['name:in']``.

        `explicit_parentheses` is true if the parsing was triggered by an
        expression in parentheses. This is used to figure out if an empty
        tuple is a valid expression or not.
        """
        lineno = self.stream.current.lineno
        if simplified:
            parse = self.parse_primary
        elif with_condexpr:
            parse = self.parse_expression
        else:

            def parse():
                return self.parse_expression(with_condexpr=False)

        args = []
        is_tuple = False
        while 1:
            if args:
                self.stream.expect("comma")
            if self.is_tuple_end(extra_end_rules):
                break
            args.append(parse())
            if self.stream.current.type == "comma":
                is_tuple = True
            else:
                break
            lineno = self.stream.current.lineno

        if not is_tuple:
            if args:
                return args[0]

            # if we don't have explicit parentheses, an empty tuple is
            # not a valid expression. This would mean nothing (literally
            # nothing) in the spot of an expression would be an empty
            # tuple.
            if not explicit_parentheses:
                self.fail(
                    "Expected an expression, got '%s'"
                    % describe_token(self.stream.current)
                )

        return nodes.Tuple(args, "load", lineno=lineno)

    def parse_list(self):
        token = self.stream.expect("lbracket")
        items = []
        while self.stream.current.type != "rbracket":
            if items:
                self.stream.expect("comma")
            if self.stream.current.type == "rbracket":
                break
            items.append(self.parse_expression())
        self.stream.expect("rbracket")
        return nodes.List(items, lineno=token.lineno)

    def parse_dict(self):
        token = self.stream.expect("lbrace")
        items = []
        while self.stream.current.type != "rbrace":
            if items:
                self.stream.expect("comma")
            if self.stream.current.type == "rbrace":
                break
            key = self.parse_expression()
            self.stream.expect("colon")
            value = self.parse_expression()
            items.append(nodes.Pair(key, value, lineno=key.lineno))
        self.stream.expect("rbrace")
        return nodes.Dict(items, lineno=token.lineno)

    def parse_postfix(self, node):
        while 1:
            token_type = self.stream.current.type
            if token_type == "dot" or token_type == "lbracket":
                node = self.parse_subscript(node)
            # calls are valid both after postfix expressions (getattr
            # and getitem) as well as filters and tests
            elif token_type == "lparen":
                node = self.parse_call(node)
            else:
                break
        return node

    def parse_filter_expr(self, node):
        while 1:
            token_type = self.stream.current.type
            if token_type == "pipe":
                node = self.parse_filter(node)
            elif token_type == "name" and self.stream.current.value == "is":
                node = self.parse_test(node)
            # calls are valid both after postfix expressions (getattr
            # and getitem) as well as filters and tests
            elif token_type == "lparen":
                node = self.parse_call(node)
            else:
                break
        return node

    def parse_subscript(self, node):
        token = next(self.stream)
        if token.type == "dot":
            attr_token = self.stream.current
            next(self.stream)
            if attr_token.type == "name":
                return nodes.Getattr(
                    node, attr_token.value, "load", lineno=token.lineno
                )
            elif attr_token.type != "integer":
                self.fail("expected name or number", attr_token.lineno)
            arg = nodes.Const(attr_token.value, lineno=attr_token.lineno)
            return nodes.Getitem(node, arg, "load", lineno=token.lineno)
        if token.type == "lbracket":
            args = []
            while self.stream.current.type != "rbracket":
                if args:
                    self.stream.expect("comma")
                args.append(self.parse_subscribed())
            self.stream.expect("rbracket")
            if len(args) == 1:
                arg = args[0]
            else:
                arg = nodes.Tuple(args, "load", lineno=token.lineno)
            return nodes.Getitem(node, arg, "load", lineno=token.lineno)
        self.fail("expected subscript expression", token.lineno)

    def parse_subscribed(self):
        lineno = self.stream.current.lineno

        if self.stream.current.type == "colon":
            next(self.stream)
            args = [None]
        else:
            node = self.parse_expression()
            if self.stream.current.type != "colon":
                return node
            next(self.stream)
            args = [node]

        if self.stream.current.type == "colon":
            args.append(None)
        elif self.stream.current.type not in ("rbracket", "comma"):
            args.append(self.parse_expression())
        else:
            args.append(None)

        if self.stream.current.type == "colon":
            next(self.stream)
            if self.stream.current.type not in ("rbracket", "comma"):
                args.append(self.parse_expression())
            else:
                args.append(None)
        else:
            args.append(None)

        return nodes.Slice(lineno=lineno, *args)

    def parse_call(self, node):
        token = self.stream.expect("lparen")
        args = []
        kwargs = []
        dyn_args = dyn_kwargs = None
        require_comma = False

        def ensure(expr):
            if not expr:
                self.fail("invalid syntax for function call expression", token.lineno)

        while self.stream.current.type != "rparen":
            if require_comma:
                self.stream.expect("comma")
                # support for trailing comma
                if self.stream.current.type == "rparen":
                    break
            if self.stream.current.type == "mul":
                ensure(dyn_args is None and dyn_kwargs is None)
                next(self.stream)
                dyn_args = self.parse_expression()
            elif self.stream.current.type == "pow":
                ensure(dyn_kwargs is None)
                next(self.stream)
                dyn_kwargs = self.parse_expression()
            else:
                if (
                    self.stream.current.type == "name"
                    and self.stream.look().type == "assign"
                ):
                    # Parsing a kwarg
                    ensure(dyn_kwargs is None)
                    key = self.stream.current.value
                    self.stream.skip(2)
                    value = self.parse_expression()
                    kwargs.append(nodes.Keyword(key, value, lineno=value.lineno))
                else:
                    # Parsing an arg
                    ensure(dyn_args is None and dyn_kwargs is None and not kwargs)
                    args.append(self.parse_expression())

            require_comma = True
        self.stream.expect("rparen")

        if node is None:
            return args, kwargs, dyn_args, dyn_kwargs
        return nodes.Call(node, args, kwargs, dyn_args, dyn_kwargs, lineno=token.lineno)

    def parse_filter(self, node, start_inline=False):
        while self.stream.current.type == "pipe" or start_inline:
            if not start_inline:
                next(self.stream)
            token = self.stream.expect("name")
            name = token.value
            while self.stream.current.type == "dot":
                next(self.stream)
                name += "." + self.stream.expect("name").value
            if self.stream.current.type == "lparen":
                args, kwargs, dyn_args, dyn_kwargs = self.parse_call(None)
            else:
                args = []
                kwargs = []
                dyn_args = dyn_kwargs = None
            node = nodes.Filter(
                node, name, args, kwargs, dyn_args, dyn_kwargs, lineno=token.lineno
            )
            start_inline = False
        return node

    def parse_test(self, node):
        token = next(self.stream)
        if self.stream.current.test("name:not"):
            next(self.stream)
            negated = True
        else:
            negated = False
        name = self.stream.expect("name").value
        while self.stream.current.type == "dot":
            next(self.stream)
            name += "." + self.stream.expect("name").value
        dyn_args = dyn_kwargs = None
        kwargs = []
        if self.stream.current.type == "lparen":
            args, kwargs, dyn_args, dyn_kwargs = self.parse_call(None)
        elif self.stream.current.type in (
            "name",
            "string",
            "integer",
            "float",
            "lparen",
            "lbracket",
            "lbrace",
        ) and not self.stream.current.test_any("name:else", "name:or", "name:and"):
            if self.stream.current.test("name:is"):
                self.fail("You cannot chain multiple tests with is")
            arg_node = self.parse_primary()
            arg_node = self.parse_postfix(arg_node)
            args = [arg_node]
        else:
            args = []
        node = nodes.Test(
            node, name, args, kwargs, dyn_args, dyn_kwargs, lineno=token.lineno
        )
        if negated:
            node = nodes.Not(node, lineno=token.lineno)
        return node

    def subparse(self, end_tokens=None):
        body = []
        data_buffer = []
        add_data = data_buffer.append

        if end_tokens is not None:
            self._end_token_stack.append(end_tokens)

        def flush_data():
            if data_buffer:
                lineno = data_buffer[0].lineno
                body.append(nodes.Output(data_buffer[:], lineno=lineno))
                del data_buffer[:]

        try:
            while self.stream:
                token = self.stream.current
                if token.type == "data":
                    if token.value:
                        add_data(nodes.TemplateData(token.value, lineno=token.lineno))
                    next(self.stream)
                elif token.type == "variable_begin":
                    next(self.stream)
                    add_data(self.parse_tuple(with_condexpr=True))
                    self.stream.expect("variable_end")
                elif token.type == "block_begin":
                    flush_data()
                    next(self.stream)
                    if end_tokens is not None and self.stream.current.test_any(
                        *end_tokens
                    ):
                        return body
                    rv = self.parse_statement()
                    if isinstance(rv, list):
                        body.extend(rv)
                    else:
                        body.append(rv)
                    self.stream.expect("block_end")
                else:
                    raise AssertionError("internal parsing error")

            flush_data()
        finally:
            if end_tokens is not None:
                self._end_token_stack.pop()

        return body

    def parse(self):
        """Parse the whole template into a `Template` node."""
        result = nodes.Template(self.subparse(), lineno=1)
        result.set_environment(self.environment)
        return result
```
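
For context, the sketch below (not part of the changeset) shows how this class is usually driven: `Environment.parse` tokenizes the source and runs `Parser.parse()` to produce a `nodes.Template` AST, and a custom tag registered through `jinja2.ext.Extension` receives this same `Parser` instance in its `parse` method, which is exactly the dispatch path visible in `parse_statement` and `self.extensions`. The `Environment`, `nodes`, and `Extension` APIs are standard Jinja2 (2.11.x, matching the vendored file); the `UpperBlockExtension` tag and the sample template are hypothetical, purely illustrative names.

```python
# Minimal sketch, assuming Jinja2 2.11.x as vendored above.
from jinja2 import Environment, nodes
from jinja2.ext import Extension


class UpperBlockExtension(Extension):
    """Hypothetical {% upper %} ... {% endupper %} tag, shown only to
    illustrate how Parser methods are called from an extension."""

    tags = {"upper"}

    def parse(self, parser):
        # `parser` is the Parser instance defined above; parse_statement()
        # dispatched here via parser.extensions["upper"].
        lineno = next(parser.stream).lineno  # consume the 'upper' name token
        body = parser.parse_statements(("name:endupper",), drop_needle=True)
        call = self.call_method("_upper")
        return nodes.CallBlock(call, [], [], body).set_lineno(lineno)

    def _upper(self, caller):
        # caller() renders the block body; upper-case the result.
        return caller().upper()


env = Environment(extensions=[UpperBlockExtension])

# Environment.parse runs Parser.parse() and returns a nodes.Template AST.
ast = env.parse("{% upper %}hello {{ name }}{% endupper %}")
print(type(ast))  # <class 'jinja2.nodes.Template'>

# Rendering goes through the same parsing step internally.
print(env.from_string("{% upper %}hello {{ name }}{% endupper %}").render(name="world"))
# HELLO WORLD
```

Because `parse_statement` looks unknown tags up in `self.extensions` before falling back to `fail_unknown_tag`, third-party tags and the built-in keywords share the same entry point and the same error reporting.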
