From fb7f0437323ba836c68947d38f3604c3336e3a9b Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 29 May 2019 18:18:04 -0400 Subject: Use tox / zimports Change-Id: Ia047c7052a6d242c2cf1c7a83981f1e38ea4d928 --- mako/__init__.py | 2 +- mako/_ast_util.py | 388 +++++++++++++------------ mako/ast.py | 62 ++-- mako/cache.py | 16 +- mako/cmd.py | 42 ++- mako/codegen.py | 700 +++++++++++++++++++++++++--------------------- mako/compat.py | 64 +++-- mako/exceptions.py | 96 ++++--- mako/ext/autohandler.py | 38 +-- mako/ext/babelplugin.py | 28 +- mako/ext/beaker_cache.py | 32 +-- mako/ext/extract.py | 47 ++-- mako/ext/linguaplugin.py | 52 ++-- mako/ext/preprocessors.py | 2 +- mako/ext/pygmentplugin.py | 166 ++++++----- mako/ext/turbogears.py | 17 +- mako/filters.py | 92 +++--- mako/lexer.py | 273 ++++++++++-------- mako/lookup.py | 163 +++++------ mako/parsetree.py | 381 ++++++++++++++----------- mako/pygen.py | 46 +-- mako/pyparser.py | 77 ++--- mako/runtime.py | 183 +++++++----- mako/template.py | 352 ++++++++++++----------- mako/util.py | 91 +++--- 25 files changed, 1937 insertions(+), 1473 deletions(-) (limited to 'mako') diff --git a/mako/__init__.py b/mako/__init__.py index 2435b72..90a4b36 100644 --- a/mako/__init__.py +++ b/mako/__init__.py @@ -5,4 +5,4 @@ # the MIT License: http://www.opensource.org/licenses/mit-license.php -__version__ = '1.0.11' +__version__ = "1.0.11" diff --git a/mako/_ast_util.py b/mako/_ast_util.py index e2c7d24..be95bfb 100644 --- a/mako/_ast_util.py +++ b/mako/_ast_util.py @@ -30,47 +30,77 @@ :copyright: Copyright 2008 by Armin Ronacher. :license: Python License. """ -from _ast import * # noqa +from _ast import Add +from _ast import And +from _ast import AST +from _ast import BitAnd +from _ast import BitOr +from _ast import BitXor +from _ast import ClassDef +from _ast import Div +from _ast import Eq +from _ast import Expression +from _ast import FloorDiv +from _ast import FunctionDef +from _ast import Gt +from _ast import GtE +from _ast import If +from _ast import In +from _ast import Interactive +from _ast import Invert +from _ast import Is +from _ast import IsNot +from _ast import LShift +from _ast import Lt +from _ast import LtE +from _ast import Mod +from _ast import mod +from _ast import Module +from _ast import Mult +from _ast import Name +from _ast import Not +from _ast import NotEq +from _ast import NotIn +from _ast import Or +from _ast import PyCF_ONLY_AST +from _ast import RShift +from _ast import Str +from _ast import Sub +from _ast import UAdd +from _ast import USub + from mako.compat import arg_stringname -BOOLOP_SYMBOLS = { - And: 'and', - Or: 'or' -} +BOOLOP_SYMBOLS = {And: "and", Or: "or"} BINOP_SYMBOLS = { - Add: '+', - Sub: '-', - Mult: '*', - Div: '/', - FloorDiv: '//', - Mod: '%', - LShift: '<<', - RShift: '>>', - BitOr: '|', - BitAnd: '&', - BitXor: '^' + Add: "+", + Sub: "-", + Mult: "*", + Div: "/", + FloorDiv: "//", + Mod: "%", + LShift: "<<", + RShift: ">>", + BitOr: "|", + BitAnd: "&", + BitXor: "^", } CMPOP_SYMBOLS = { - Eq: '==', - Gt: '>', - GtE: '>=', - In: 'in', - Is: 'is', - IsNot: 'is not', - Lt: '<', - LtE: '<=', - NotEq: '!=', - NotIn: 'not in' + Eq: "==", + Gt: ">", + GtE: ">=", + In: "in", + Is: "is", + IsNot: "is not", + Lt: "<", + LtE: "<=", + NotEq: "!=", + NotIn: "not in", } -UNARYOP_SYMBOLS = { - Invert: '~', - Not: 'not', - UAdd: '+', - USub: '-' -} +UNARYOP_SYMBOLS = {Invert: "~", Not: "not", UAdd: "+", USub: "-"} ALL_SYMBOLS = {} ALL_SYMBOLS.update(BOOLOP_SYMBOLS) @@ -79,12 +109,12 @@ ALL_SYMBOLS.update(CMPOP_SYMBOLS) 
ALL_SYMBOLS.update(UNARYOP_SYMBOLS) -def parse(expr, filename='', mode='exec'): +def parse(expr, filename="", mode="exec"): """Parse an expression into an AST node.""" return compile(expr, filename, mode, PyCF_ONLY_AST) -def to_source(node, indent_with=' ' * 4): +def to_source(node, indent_with=" " * 4): """ This function can convert a node tree back into python sourcecode. This is useful for debugging purposes, especially if you're dealing with custom @@ -101,7 +131,7 @@ def to_source(node, indent_with=' ' * 4): """ generator = SourceGenerator(indent_with) generator.visit(node) - return ''.join(generator.result) + return "".join(generator.result) def dump(node): @@ -109,16 +139,21 @@ def dump(node): A very verbose representation of the node passed. This is useful for debugging purposes. """ + def _format(node): if isinstance(node, AST): - return '%s(%s)' % (node.__class__.__name__, - ', '.join('%s=%s' % (a, _format(b)) - for a, b in iter_fields(node))) + return "%s(%s)" % ( + node.__class__.__name__, + ", ".join( + "%s=%s" % (a, _format(b)) for a, b in iter_fields(node) + ), + ) elif isinstance(node, list): - return '[%s]' % ', '.join(_format(x) for x in node) + return "[%s]" % ", ".join(_format(x) for x in node) return repr(node) + if not isinstance(node, AST): - raise TypeError('expected AST, got %r' % node.__class__.__name__) + raise TypeError("expected AST, got %r" % node.__class__.__name__) return _format(node) @@ -127,9 +162,12 @@ def copy_location(new_node, old_node): Copy the source location hint (`lineno` and `col_offset`) from the old to the new node if possible and return the new one. """ - for attr in 'lineno', 'col_offset': - if attr in old_node._attributes and attr in new_node._attributes \ - and hasattr(old_node, attr): + for attr in "lineno", "col_offset": + if ( + attr in old_node._attributes + and attr in new_node._attributes + and hasattr(old_node, attr) + ): setattr(new_node, attr, getattr(old_node, attr)) return new_node @@ -146,19 +184,21 @@ def fix_missing_locations(node): Unlike `copy_location` this works recursive and won't touch nodes that already have a location information. """ + def _fix(node, lineno, col_offset): - if 'lineno' in node._attributes: - if not hasattr(node, 'lineno'): + if "lineno" in node._attributes: + if not hasattr(node, "lineno"): node.lineno = lineno else: lineno = node.lineno - if 'col_offset' in node._attributes: - if not hasattr(node, 'col_offset'): + if "col_offset" in node._attributes: + if not hasattr(node, "col_offset"): node.col_offset = col_offset else: col_offset = node.col_offset for child in iter_child_nodes(node): _fix(child, lineno, col_offset) + _fix(node, 1, 0) return node @@ -170,14 +210,14 @@ def increment_lineno(node, n=1): file. """ for node in zip((node,), walk(node)): - if 'lineno' in node._attributes: - node.lineno = getattr(node, 'lineno', 0) + n + if "lineno" in node._attributes: + node.lineno = getattr(node, "lineno", 0) + n def iter_fields(node): """Iterate over all fields of a node, only yielding existing fields.""" # CPython 2.5 compat - if not hasattr(node, '_fields') or not node._fields: + if not hasattr(node, "_fields") or not node._fields: return for field in node._fields: try: @@ -213,11 +253,10 @@ def get_compile_mode(node): node (`Expression`, `Module` etc.) a `TypeError` is thrown. 
""" if not isinstance(node, mod): - raise TypeError('expected mod node, got %r' % node.__class__.__name__) - return { - Expression: 'eval', - Interactive: 'single' - }.get(node.__class__, 'expr') + raise TypeError("expected mod node, got %r" % node.__class__.__name__) + return {Expression: "eval", Interactive: "single"}.get( + node.__class__, "expr" + ) def get_docstring(node): @@ -238,6 +277,7 @@ def walk(node): place and don't care about the context or the order the nodes are returned. """ from collections import deque + todo = deque([node]) while todo: node = todo.popleft() @@ -269,7 +309,7 @@ class NodeVisitor(object): exists for this node. In that case the generic visit function is used instead. """ - method = 'visit_' + node.__class__.__name__ + method = "visit_" + node.__class__.__name__ return getattr(self, method, None) def visit(self, node): @@ -367,7 +407,7 @@ class SourceGenerator(NodeVisitor): def write(self, x): if self.new_lines: if self.result: - self.result.append('\n' * self.new_lines) + self.result.append("\n" * self.new_lines) self.result.append(self.indent_with * self.indentation) self.new_lines = 0 self.result.append(x) @@ -386,7 +426,7 @@ class SourceGenerator(NodeVisitor): self.body(node.body) if node.orelse: self.newline() - self.write('else:') + self.write("else:") self.body(node.orelse) def signature(self, node): @@ -394,7 +434,7 @@ class SourceGenerator(NodeVisitor): def write_comma(): if want_comma: - self.write(', ') + self.write(", ") else: want_comma.append(True) @@ -403,19 +443,19 @@ class SourceGenerator(NodeVisitor): write_comma() self.visit(arg) if default is not None: - self.write('=') + self.write("=") self.visit(default) if node.vararg is not None: write_comma() - self.write('*' + arg_stringname(node.vararg)) + self.write("*" + arg_stringname(node.vararg)) if node.kwarg is not None: write_comma() - self.write('**' + arg_stringname(node.kwarg)) + self.write("**" + arg_stringname(node.kwarg)) def decorators(self, node): for decorator in node.decorator_list: self.newline() - self.write('@') + self.write("@") self.visit(decorator) # Statements @@ -424,29 +464,29 @@ class SourceGenerator(NodeVisitor): self.newline() for idx, target in enumerate(node.targets): if idx: - self.write(', ') + self.write(", ") self.visit(target) - self.write(' = ') + self.write(" = ") self.visit(node.value) def visit_AugAssign(self, node): self.newline() self.visit(node.target) - self.write(BINOP_SYMBOLS[type(node.op)] + '=') + self.write(BINOP_SYMBOLS[type(node.op)] + "=") self.visit(node.value) def visit_ImportFrom(self, node): self.newline() - self.write('from %s%s import ' % ('.' * node.level, node.module)) + self.write("from %s%s import " % ("." 
* node.level, node.module)) for idx, item in enumerate(node.names): if idx: - self.write(', ') + self.write(", ") self.write(item) def visit_Import(self, node): self.newline() for item in node.names: - self.write('import ') + self.write("import ") self.visit(item) def visit_Expr(self, node): @@ -457,9 +497,9 @@ class SourceGenerator(NodeVisitor): self.newline(n=2) self.decorators(node) self.newline() - self.write('def %s(' % node.name) + self.write("def %s(" % node.name) self.signature(node.args) - self.write('):') + self.write("):") self.body(node.body) def visit_ClassDef(self, node): @@ -467,200 +507,200 @@ class SourceGenerator(NodeVisitor): def paren_or_comma(): if have_args: - self.write(', ') + self.write(", ") else: have_args.append(True) - self.write('(') + self.write("(") self.newline(n=3) self.decorators(node) self.newline() - self.write('class %s' % node.name) + self.write("class %s" % node.name) for base in node.bases: paren_or_comma() self.visit(base) # XXX: the if here is used to keep this module compatible # with python 2.6. - if hasattr(node, 'keywords'): + if hasattr(node, "keywords"): for keyword in node.keywords: paren_or_comma() - self.write(keyword.arg + '=') + self.write(keyword.arg + "=") self.visit(keyword.value) if getattr(node, "starargs", None): paren_or_comma() - self.write('*') + self.write("*") self.visit(node.starargs) if getattr(node, "kwargs", None): paren_or_comma() - self.write('**') + self.write("**") self.visit(node.kwargs) - self.write(have_args and '):' or ':') + self.write(have_args and "):" or ":") self.body(node.body) def visit_If(self, node): self.newline() - self.write('if ') + self.write("if ") self.visit(node.test) - self.write(':') + self.write(":") self.body(node.body) while True: else_ = node.orelse if len(else_) == 1 and isinstance(else_[0], If): node = else_[0] self.newline() - self.write('elif ') + self.write("elif ") self.visit(node.test) - self.write(':') + self.write(":") self.body(node.body) else: self.newline() - self.write('else:') + self.write("else:") self.body(else_) break def visit_For(self, node): self.newline() - self.write('for ') + self.write("for ") self.visit(node.target) - self.write(' in ') + self.write(" in ") self.visit(node.iter) - self.write(':') + self.write(":") self.body_or_else(node) def visit_While(self, node): self.newline() - self.write('while ') + self.write("while ") self.visit(node.test) - self.write(':') + self.write(":") self.body_or_else(node) def visit_With(self, node): self.newline() - self.write('with ') + self.write("with ") self.visit(node.context_expr) if node.optional_vars is not None: - self.write(' as ') + self.write(" as ") self.visit(node.optional_vars) - self.write(':') + self.write(":") self.body(node.body) def visit_Pass(self, node): self.newline() - self.write('pass') + self.write("pass") def visit_Print(self, node): # XXX: python 2.6 only self.newline() - self.write('print ') + self.write("print ") want_comma = False if node.dest is not None: - self.write(' >> ') + self.write(" >> ") self.visit(node.dest) want_comma = True for value in node.values: if want_comma: - self.write(', ') + self.write(", ") self.visit(value) want_comma = True if not node.nl: - self.write(',') + self.write(",") def visit_Delete(self, node): self.newline() - self.write('del ') + self.write("del ") for idx, target in enumerate(node): if idx: - self.write(', ') + self.write(", ") self.visit(target) def visit_TryExcept(self, node): self.newline() - self.write('try:') + self.write("try:") self.body(node.body) for 
handler in node.handlers: self.visit(handler) def visit_TryFinally(self, node): self.newline() - self.write('try:') + self.write("try:") self.body(node.body) self.newline() - self.write('finally:') + self.write("finally:") self.body(node.finalbody) def visit_Global(self, node): self.newline() - self.write('global ' + ', '.join(node.names)) + self.write("global " + ", ".join(node.names)) def visit_Nonlocal(self, node): self.newline() - self.write('nonlocal ' + ', '.join(node.names)) + self.write("nonlocal " + ", ".join(node.names)) def visit_Return(self, node): self.newline() - self.write('return ') + self.write("return ") self.visit(node.value) def visit_Break(self, node): self.newline() - self.write('break') + self.write("break") def visit_Continue(self, node): self.newline() - self.write('continue') + self.write("continue") def visit_Raise(self, node): # XXX: Python 2.6 / 3.0 compatibility self.newline() - self.write('raise') - if hasattr(node, 'exc') and node.exc is not None: - self.write(' ') + self.write("raise") + if hasattr(node, "exc") and node.exc is not None: + self.write(" ") self.visit(node.exc) if node.cause is not None: - self.write(' from ') + self.write(" from ") self.visit(node.cause) - elif hasattr(node, 'type') and node.type is not None: + elif hasattr(node, "type") and node.type is not None: self.visit(node.type) if node.inst is not None: - self.write(', ') + self.write(", ") self.visit(node.inst) if node.tback is not None: - self.write(', ') + self.write(", ") self.visit(node.tback) # Expressions def visit_Attribute(self, node): self.visit(node.value) - self.write('.' + node.attr) + self.write("." + node.attr) def visit_Call(self, node): want_comma = [] def write_comma(): if want_comma: - self.write(', ') + self.write(", ") else: want_comma.append(True) self.visit(node.func) - self.write('(') + self.write("(") for arg in node.args: write_comma() self.visit(arg) for keyword in node.keywords: write_comma() - self.write(keyword.arg + '=') + self.write(keyword.arg + "=") self.visit(keyword.value) if getattr(node, "starargs", None): write_comma() - self.write('*') + self.write("*") self.visit(node.starargs) if getattr(node, "kwargs", None): write_comma() - self.write('**') + self.write("**") self.visit(node.kwargs) - self.write(')') + self.write(")") def visit_Name(self, node): self.write(node.id) @@ -685,105 +725,106 @@ class SourceGenerator(NodeVisitor): self.write(repr(node.value)) def visit_Tuple(self, node): - self.write('(') + self.write("(") idx = -1 for idx, item in enumerate(node.elts): if idx: - self.write(', ') + self.write(", ") self.visit(item) - self.write(idx and ')' or ',)') + self.write(idx and ")" or ",)") def sequence_visit(left, right): def visit(self, node): self.write(left) for idx, item in enumerate(node.elts): if idx: - self.write(', ') + self.write(", ") self.visit(item) self.write(right) + return visit - visit_List = sequence_visit('[', ']') - visit_Set = sequence_visit('{', '}') + visit_List = sequence_visit("[", "]") + visit_Set = sequence_visit("{", "}") del sequence_visit def visit_Dict(self, node): - self.write('{') + self.write("{") for idx, (key, value) in enumerate(zip(node.keys, node.values)): if idx: - self.write(', ') + self.write(", ") self.visit(key) - self.write(': ') + self.write(": ") self.visit(value) - self.write('}') + self.write("}") def visit_BinOp(self, node): - self.write('(') + self.write("(") self.visit(node.left) - self.write(' %s ' % BINOP_SYMBOLS[type(node.op)]) + self.write(" %s " % BINOP_SYMBOLS[type(node.op)]) 
self.visit(node.right) - self.write(')') + self.write(")") def visit_BoolOp(self, node): - self.write('(') + self.write("(") for idx, value in enumerate(node.values): if idx: - self.write(' %s ' % BOOLOP_SYMBOLS[type(node.op)]) + self.write(" %s " % BOOLOP_SYMBOLS[type(node.op)]) self.visit(value) - self.write(')') + self.write(")") def visit_Compare(self, node): - self.write('(') + self.write("(") self.visit(node.left) for op, right in zip(node.ops, node.comparators): - self.write(' %s ' % CMPOP_SYMBOLS[type(op)]) + self.write(" %s " % CMPOP_SYMBOLS[type(op)]) self.visit(right) - self.write(')') + self.write(")") def visit_UnaryOp(self, node): - self.write('(') + self.write("(") op = UNARYOP_SYMBOLS[type(node.op)] self.write(op) - if op == 'not': - self.write(' ') + if op == "not": + self.write(" ") self.visit(node.operand) - self.write(')') + self.write(")") def visit_Subscript(self, node): self.visit(node.value) - self.write('[') + self.write("[") self.visit(node.slice) - self.write(']') + self.write("]") def visit_Slice(self, node): if node.lower is not None: self.visit(node.lower) - self.write(':') + self.write(":") if node.upper is not None: self.visit(node.upper) if node.step is not None: - self.write(':') - if not (isinstance(node.step, Name) and node.step.id == 'None'): + self.write(":") + if not (isinstance(node.step, Name) and node.step.id == "None"): self.visit(node.step) def visit_ExtSlice(self, node): for idx, item in node.dims: if idx: - self.write(', ') + self.write(", ") self.visit(item) def visit_Yield(self, node): - self.write('yield ') + self.write("yield ") self.visit(node.value) def visit_Lambda(self, node): - self.write('lambda ') + self.write("lambda ") self.signature(node.args) - self.write(': ') + self.write(": ") self.visit(node.body) def visit_Ellipsis(self, node): - self.write('Ellipsis') + self.write("Ellipsis") def generator_visit(left, right): def visit(self, node): @@ -792,64 +833,65 @@ class SourceGenerator(NodeVisitor): for comprehension in node.generators: self.visit(comprehension) self.write(right) + return visit - visit_ListComp = generator_visit('[', ']') - visit_GeneratorExp = generator_visit('(', ')') - visit_SetComp = generator_visit('{', '}') + visit_ListComp = generator_visit("[", "]") + visit_GeneratorExp = generator_visit("(", ")") + visit_SetComp = generator_visit("{", "}") del generator_visit def visit_DictComp(self, node): - self.write('{') + self.write("{") self.visit(node.key) - self.write(': ') + self.write(": ") self.visit(node.value) for comprehension in node.generators: self.visit(comprehension) - self.write('}') + self.write("}") def visit_IfExp(self, node): self.visit(node.body) - self.write(' if ') + self.write(" if ") self.visit(node.test) - self.write(' else ') + self.write(" else ") self.visit(node.orelse) def visit_Starred(self, node): - self.write('*') + self.write("*") self.visit(node.value) def visit_Repr(self, node): # XXX: python 2.6 only - self.write('`') + self.write("`") self.visit(node.value) - self.write('`') + self.write("`") # Helper Nodes def visit_alias(self, node): self.write(node.name) if node.asname is not None: - self.write(' as ' + node.asname) + self.write(" as " + node.asname) def visit_comprehension(self, node): - self.write(' for ') + self.write(" for ") self.visit(node.target) - self.write(' in ') + self.write(" in ") self.visit(node.iter) if node.ifs: for if_ in node.ifs: - self.write(' if ') + self.write(" if ") self.visit(if_) def visit_excepthandler(self, node): self.newline() - self.write('except') + 
self.write("except") if node.type is not None: - self.write(' ') + self.write(" ") self.visit(node.type) if node.name is not None: - self.write(' as ') + self.write(" as ") self.visit(node.name) - self.write(':') + self.write(":") self.body(node.body) diff --git a/mako/ast.py b/mako/ast.py index 8d2d150..2081d5d 100644 --- a/mako/ast.py +++ b/mako/ast.py @@ -7,9 +7,12 @@ """utilities for analyzing expressions and blocks of Python code, as well as generating Python from AST nodes""" -from mako import exceptions, pyparser, compat import re +from mako import compat +from mako import exceptions +from mako import pyparser + class PythonCode(object): @@ -72,36 +75,39 @@ class PythonFragment(PythonCode): """extends PythonCode to provide identifier lookups in partial control statements - e.g. + e.g.:: + for x in 5: elif y==9: except (MyException, e): - etc. + """ def __init__(self, code, **exception_kwargs): - m = re.match(r'^(\w+)(?:\s+(.*?))?:\s*(#|$)', code.strip(), re.S) + m = re.match(r"^(\w+)(?:\s+(.*?))?:\s*(#|$)", code.strip(), re.S) if not m: raise exceptions.CompileException( - "Fragment '%s' is not a partial control statement" % - code, **exception_kwargs) + "Fragment '%s' is not a partial control statement" % code, + **exception_kwargs + ) if m.group(3): - code = code[:m.start(3)] + code = code[: m.start(3)] (keyword, expr) = m.group(1, 2) - if keyword in ['for', 'if', 'while']: + if keyword in ["for", "if", "while"]: code = code + "pass" - elif keyword == 'try': + elif keyword == "try": code = code + "pass\nexcept:pass" - elif keyword == 'elif' or keyword == 'else': + elif keyword == "elif" or keyword == "else": code = "if False:pass\n" + code + "pass" - elif keyword == 'except': + elif keyword == "except": code = "try:pass\n" + code + "pass" - elif keyword == 'with': + elif keyword == "with": code = code + "pass" else: raise exceptions.CompileException( - "Unsupported control keyword: '%s'" % - keyword, **exception_kwargs) + "Unsupported control keyword: '%s'" % keyword, + **exception_kwargs + ) super(PythonFragment, self).__init__(code, **exception_kwargs) @@ -115,14 +121,17 @@ class FunctionDecl(object): f = pyparser.ParseFunc(self, **exception_kwargs) f.visit(expr) - if not hasattr(self, 'funcname'): + if not hasattr(self, "funcname"): raise exceptions.CompileException( "Code '%s' is not a function declaration" % code, - **exception_kwargs) + **exception_kwargs + ) if not allow_kwargs and self.kwargs: raise exceptions.CompileException( - "'**%s' keyword argument not allowed here" % - self.kwargnames[-1], **exception_kwargs) + "'**%s' keyword argument not allowed here" + % self.kwargnames[-1], + **exception_kwargs + ) def get_argument_expressions(self, as_call=False): """Return the argument declarations of this FunctionDecl as a printable @@ -157,8 +166,10 @@ class FunctionDecl(object): # `def foo(*, a=1, b, c=3)` namedecls.append(name) else: - namedecls.append("%s=%s" % ( - name, pyparser.ExpressionGenerator(default).value())) + namedecls.append( + "%s=%s" + % (name, pyparser.ExpressionGenerator(default).value()) + ) else: namedecls.append(name) @@ -171,8 +182,10 @@ class FunctionDecl(object): namedecls.append(name) else: default = defaults.pop(0) - namedecls.append("%s=%s" % ( - name, pyparser.ExpressionGenerator(default).value())) + namedecls.append( + "%s=%s" + % (name, pyparser.ExpressionGenerator(default).value()) + ) namedecls.reverse() return namedecls @@ -187,5 +200,6 @@ class FunctionArgs(FunctionDecl): """the argument portion of a function declaration""" def 
__init__(self, code, **kwargs): - super(FunctionArgs, self).__init__("def ANON(%s):pass" % code, - **kwargs) + super(FunctionArgs, self).__init__( + "def ANON(%s):pass" % code, **kwargs + ) diff --git a/mako/cache.py b/mako/cache.py index 1af17dd..57821d7 100644 --- a/mako/cache.py +++ b/mako/cache.py @@ -4,7 +4,8 @@ # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php -from mako import compat, util +from mako import compat +from mako import util _cache_plugins = util.PluginLoader("mako.cache") @@ -90,9 +91,8 @@ class Cache(object): return creation_function() return self.impl.get_or_create( - key, - creation_function, - **self._get_cache_kw(kw, context)) + key, creation_function, **self._get_cache_kw(kw, context) + ) def set(self, key, value, **kw): r"""Place a value in the cache. @@ -141,7 +141,7 @@ class Cache(object): template. """ - self.invalidate('render_body', __M_defname='render_body') + self.invalidate("render_body", __M_defname="render_body") def invalidate_def(self, name): """Invalidate the cached content of a particular ``<%def>`` within this @@ -149,7 +149,7 @@ class Cache(object): """ - self.invalidate('render_%s' % name, __M_defname='render_%s' % name) + self.invalidate("render_%s" % name, __M_defname="render_%s" % name) def invalidate_closure(self, name): """Invalidate a nested ``<%def>`` within this template. @@ -165,7 +165,7 @@ class Cache(object): self.invalidate(name, __M_defname=name) def _get_cache_kw(self, kw, context): - defname = kw.pop('__M_defname', None) + defname = kw.pop("__M_defname", None) if not defname: tmpl_kw = self.template.cache_args.copy() tmpl_kw.update(kw) @@ -177,7 +177,7 @@ class Cache(object): self._def_regions[defname] = tmpl_kw if context and self.impl.pass_context: tmpl_kw = tmpl_kw.copy() - tmpl_kw.setdefault('context', context) + tmpl_kw.setdefault("context", context) return tmpl_kw diff --git a/mako/cmd.py b/mako/cmd.py index ff738c4..e51f021 100755 --- a/mako/cmd.py +++ b/mako/cmd.py @@ -4,11 +4,13 @@ # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php from argparse import ArgumentParser -from os.path import isfile, dirname +from os.path import dirname +from os.path import isfile import sys -from mako.template import Template -from mako.lookup import TemplateLookup + from mako import exceptions +from mako.lookup import TemplateLookup +from mako.template import Template def varsplit(var): @@ -26,29 +28,39 @@ def cmdline(argv=None): parser = ArgumentParser() parser.add_argument( - "--var", default=[], action="append", - help="variable (can be used multiple times, use name=value)") + "--var", + default=[], + action="append", + help="variable (can be used multiple times, use name=value)", + ) parser.add_argument( - "--template-dir", default=[], action="append", + "--template-dir", + default=[], + action="append", help="Directory to use for template lookup (multiple " "directories may be provided). 
If not given then if the " "template is read from stdin, the value defaults to be " "the current directory, otherwise it defaults to be the " - "parent directory of the file provided.") + "parent directory of the file provided.", + ) parser.add_argument( - "--output-encoding", default=None, - help="force output encoding") - parser.add_argument('input', nargs='?', default='-') + "--output-encoding", default=None, help="force output encoding" + ) + parser.add_argument("input", nargs="?", default="-") options = parser.parse_args(argv) output_encoding = options.output_encoding - if options.input == '-': + if options.input == "-": lookup_dirs = options.template_dir or ["."] lookup = TemplateLookup(lookup_dirs) try: - template = Template(sys.stdin.read(), lookup=lookup, output_encoding=output_encoding) + template = Template( + sys.stdin.read(), + lookup=lookup, + output_encoding=output_encoding, + ) except: _exit() else: @@ -58,7 +70,11 @@ def cmdline(argv=None): lookup_dirs = options.template_dir or [dirname(filename)] lookup = TemplateLookup(lookup_dirs) try: - template = Template(filename=filename, lookup=lookup, output_encoding=output_encoding) + template = Template( + filename=filename, + lookup=lookup, + output_encoding=output_encoding, + ) except: _exit() diff --git a/mako/codegen.py b/mako/codegen.py index d4ecbe8..d7e48f9 100644 --- a/mako/codegen.py +++ b/mako/codegen.py @@ -7,11 +7,16 @@ """provides functionality for rendering a parsetree constructing into module source code.""" -import time import re -from mako.pygen import PythonPrinter -from mako import util, ast, parsetree, filters, exceptions +import time + +from mako import ast from mako import compat +from mako import exceptions +from mako import filters +from mako import parsetree +from mako import util +from mako.pygen import PythonPrinter MAGIC_NUMBER = 10 @@ -20,22 +25,24 @@ MAGIC_NUMBER = 10 # template and are not accessed via the # context itself TOPLEVEL_DECLARED = set(["UNDEFINED", "STOP_RENDERING"]) -RESERVED_NAMES = set(['context', 'loop']).union(TOPLEVEL_DECLARED) - - -def compile(node, - uri, - filename=None, - default_filters=None, - buffer_filters=None, - imports=None, - future_imports=None, - source_encoding=None, - generate_magic_comment=True, - disable_unicode=False, - strict_undefined=False, - enable_loop=True, - reserved_names=frozenset()): +RESERVED_NAMES = set(["context", "loop"]).union(TOPLEVEL_DECLARED) + + +def compile( # noqa + node, + uri, + filename=None, + default_filters=None, + buffer_filters=None, + imports=None, + future_imports=None, + source_encoding=None, + generate_magic_comment=True, + disable_unicode=False, + strict_undefined=False, + enable_loop=True, + reserved_names=frozenset(), +): """Generate module source code given a parsetree node, uri, and optional source filename""" @@ -49,38 +56,43 @@ def compile(node, buf = util.FastEncodingBuffer() printer = PythonPrinter(buf) - _GenerateRenderMethod(printer, - _CompileContext(uri, - filename, - default_filters, - buffer_filters, - imports, - future_imports, - source_encoding, - generate_magic_comment, - disable_unicode, - strict_undefined, - enable_loop, - reserved_names), - node) + _GenerateRenderMethod( + printer, + _CompileContext( + uri, + filename, + default_filters, + buffer_filters, + imports, + future_imports, + source_encoding, + generate_magic_comment, + disable_unicode, + strict_undefined, + enable_loop, + reserved_names, + ), + node, + ) return buf.getvalue() class _CompileContext(object): - - def __init__(self, - uri, - filename, 
- default_filters, - buffer_filters, - imports, - future_imports, - source_encoding, - generate_magic_comment, - disable_unicode, - strict_undefined, - enable_loop, - reserved_names): + def __init__( + self, + uri, + filename, + default_filters, + buffer_filters, + imports, + future_imports, + source_encoding, + generate_magic_comment, + disable_unicode, + strict_undefined, + enable_loop, + reserved_names, + ): self.uri = uri self.filename = filename self.default_filters = default_filters @@ -113,12 +125,12 @@ class _GenerateRenderMethod(object): name = "render_%s" % node.funcname args = node.get_argument_expressions() filtered = len(node.filter_args.args) > 0 - buffered = eval(node.attributes.get('buffered', 'False')) - cached = eval(node.attributes.get('cached', 'False')) + buffered = eval(node.attributes.get("buffered", "False")) + cached = eval(node.attributes.get("cached", "False")) defs = None pagetag = None if node.is_block and not node.is_anonymous: - args += ['**pageargs'] + args += ["**pageargs"] else: defs = self.write_toplevel() pagetag = self.compiler.pagetag @@ -126,25 +138,23 @@ class _GenerateRenderMethod(object): if pagetag is not None: args = pagetag.body_decl.get_argument_expressions() if not pagetag.body_decl.kwargs: - args += ['**pageargs'] - cached = eval(pagetag.attributes.get('cached', 'False')) + args += ["**pageargs"] + cached = eval(pagetag.attributes.get("cached", "False")) self.compiler.enable_loop = self.compiler.enable_loop or eval( - pagetag.attributes.get( - 'enable_loop', 'False') + pagetag.attributes.get("enable_loop", "False") ) else: - args = ['**pageargs'] + args = ["**pageargs"] cached = False buffered = filtered = False if args is None: - args = ['context'] + args = ["context"] else: - args = [a for a in ['context'] + args] + args = [a for a in ["context"] + args] self.write_render_callable( - pagetag or node, - name, args, - buffered, filtered, cached) + pagetag or node, name, args, buffered, filtered, cached + ) if defs is not None: for node in defs: @@ -154,8 +164,9 @@ class _GenerateRenderMethod(object): self.write_metadata_struct() def write_metadata_struct(self): - self.printer.source_map[self.printer.lineno] = \ - max(self.printer.source_map) + self.printer.source_map[self.printer.lineno] = max( + self.printer.source_map + ) struct = { "filename": self.compiler.filename, "uri": self.compiler.uri, @@ -164,10 +175,9 @@ class _GenerateRenderMethod(object): } self.printer.writelines( '"""', - '__M_BEGIN_METADATA', + "__M_BEGIN_METADATA", compat.json.dumps(struct), - '__M_END_METADATA\n' - '"""' + "__M_END_METADATA\n" '"""', ) @property @@ -186,7 +196,6 @@ class _GenerateRenderMethod(object): self.compiler.pagetag = None class FindTopLevel(object): - def visitInheritTag(s, node): inherit.append(node) @@ -214,14 +223,19 @@ class _GenerateRenderMethod(object): module_identifiers.declared = module_ident # module-level names, python code - if self.compiler.generate_magic_comment and \ - self.compiler.source_encoding: - self.printer.writeline("# -*- coding:%s -*-" % - self.compiler.source_encoding) + if ( + self.compiler.generate_magic_comment + and self.compiler.source_encoding + ): + self.printer.writeline( + "# -*- coding:%s -*-" % self.compiler.source_encoding + ) if self.compiler.future_imports: - self.printer.writeline("from __future__ import %s" % - (", ".join(self.compiler.future_imports),)) + self.printer.writeline( + "from __future__ import %s" + % (", ".join(self.compiler.future_imports),) + ) self.printer.writeline("from mako import runtime, 
filters, cache") self.printer.writeline("UNDEFINED = runtime.UNDEFINED") self.printer.writeline("STOP_RENDERING = runtime.STOP_RENDERING") @@ -231,36 +245,41 @@ class _GenerateRenderMethod(object): self.printer.writeline("_modified_time = %r" % time.time()) self.printer.writeline("_enable_loop = %r" % self.compiler.enable_loop) self.printer.writeline( - "_template_filename = %r" % self.compiler.filename) + "_template_filename = %r" % self.compiler.filename + ) self.printer.writeline("_template_uri = %r" % self.compiler.uri) self.printer.writeline( - "_source_encoding = %r" % self.compiler.source_encoding) + "_source_encoding = %r" % self.compiler.source_encoding + ) if self.compiler.imports: - buf = '' + buf = "" for imp in self.compiler.imports: buf += imp + "\n" self.printer.writeline(imp) impcode = ast.PythonCode( buf, - source='', lineno=0, + source="", + lineno=0, pos=0, - filename='template defined imports') + filename="template defined imports", + ) else: impcode = None main_identifiers = module_identifiers.branch(self.node) - module_identifiers.topleveldefs = \ - module_identifiers.topleveldefs.\ - union(main_identifiers.topleveldefs) + mit = module_identifiers.topleveldefs + module_identifiers.topleveldefs = mit.union( + main_identifiers.topleveldefs + ) module_identifiers.declared.update(TOPLEVEL_DECLARED) if impcode: module_identifiers.declared.update(impcode.declared_identifiers) self.compiler.identifiers = module_identifiers - self.printer.writeline("_exports = %r" % - [n.name for n in - main_identifiers.topleveldefs.values()] - ) + self.printer.writeline( + "_exports = %r" + % [n.name for n in main_identifiers.topleveldefs.values()] + ) self.printer.write_blanks(2) if len(module_code): @@ -274,8 +293,9 @@ class _GenerateRenderMethod(object): return list(main_identifiers.topleveldefs.values()) - def write_render_callable(self, node, name, args, buffered, filtered, - cached): + def write_render_callable( + self, node, name, args, buffered, filtered, cached + ): """write a top-level render callable. 
this could be the main render() method or that of a top-level def.""" @@ -284,32 +304,38 @@ class _GenerateRenderMethod(object): decorator = node.decorator if decorator: self.printer.writeline( - "@runtime._decorate_toplevel(%s)" % decorator) + "@runtime._decorate_toplevel(%s)" % decorator + ) self.printer.start_source(node.lineno) self.printer.writelines( - "def %s(%s):" % (name, ','.join(args)), + "def %s(%s):" % (name, ",".join(args)), # push new frame, assign current frame to __M_caller "__M_caller = context.caller_stack._push_frame()", - "try:" + "try:", ) if buffered or filtered or cached: self.printer.writeline("context._push_buffer()") self.identifier_stack.append( - self.compiler.identifiers.branch(self.node)) - if (not self.in_def or self.node.is_block) and '**pageargs' in args: - self.identifier_stack[-1].argument_declared.add('pageargs') + self.compiler.identifiers.branch(self.node) + ) + if (not self.in_def or self.node.is_block) and "**pageargs" in args: + self.identifier_stack[-1].argument_declared.add("pageargs") if not self.in_def and ( - len(self.identifiers.locally_assigned) > 0 or - len(self.identifiers.argument_declared) > 0 + len(self.identifiers.locally_assigned) > 0 + or len(self.identifiers.argument_declared) > 0 ): - self.printer.writeline("__M_locals = __M_dict_builtin(%s)" % - ','.join([ - "%s=%s" % (x, x) for x in - self.identifiers.argument_declared - ])) + self.printer.writeline( + "__M_locals = __M_dict_builtin(%s)" + % ",".join( + [ + "%s=%s" % (x, x) + for x in self.identifiers.argument_declared + ] + ) + ) self.write_variable_declares(self.identifiers, toplevel=True) @@ -321,9 +347,8 @@ class _GenerateRenderMethod(object): self.printer.write_blanks(2) if cached: self.write_cache_decorator( - node, name, - args, buffered, - self.identifiers, toplevel=True) + node, name, args, buffered, self.identifiers, toplevel=True + ) def write_module_code(self, module_code): """write module-level template code, i.e. 
that which @@ -338,9 +363,9 @@ class _GenerateRenderMethod(object): self.printer.writelines( "def _mako_inherit(template, context):", "_mako_generate_namespaces(context)", - "return runtime._inherit_from(context, %s, _template_uri)" % - (node.parsed_attributes['file']), - None + "return runtime._inherit_from(context, %s, _template_uri)" + % (node.parsed_attributes["file"]), + None, ) def write_namespaces(self, namespaces): @@ -352,12 +377,13 @@ class _GenerateRenderMethod(object): "except KeyError:", "_mako_generate_namespaces(context)", "return context.namespaces[(__name__, name)]", - None, None + None, + None, ) self.printer.writeline("def _mako_generate_namespaces(context):") for node in namespaces.values(): - if 'import' in node.attributes: + if "import" in node.attributes: self.compiler.has_ns_imports = True self.printer.start_source(node.lineno) if len(node.nodes): @@ -367,7 +393,6 @@ class _GenerateRenderMethod(object): self.in_def = True class NSDefVisitor(object): - def visitDefTag(s, node): s.visitDefOrBase(node) @@ -383,56 +408,54 @@ class _GenerateRenderMethod(object): ) self.write_inline_def(node, identifiers, nested=False) export.append(node.funcname) + vis = NSDefVisitor() for n in node.nodes: n.accept_visitor(vis) - self.printer.writeline("return [%s]" % (','.join(export))) + self.printer.writeline("return [%s]" % (",".join(export))) self.printer.writeline(None) self.in_def = False callable_name = "make_namespace()" else: callable_name = "None" - if 'file' in node.parsed_attributes: + if "file" in node.parsed_attributes: self.printer.writeline( "ns = runtime.TemplateNamespace(%r," " context._clean_inheritance_tokens()," " templateuri=%s, callables=%s, " - " calling_uri=_template_uri)" % - ( + " calling_uri=_template_uri)" + % ( node.name, - node.parsed_attributes.get('file', 'None'), + node.parsed_attributes.get("file", "None"), callable_name, ) ) - elif 'module' in node.parsed_attributes: + elif "module" in node.parsed_attributes: self.printer.writeline( "ns = runtime.ModuleNamespace(%r," " context._clean_inheritance_tokens()," " callables=%s, calling_uri=_template_uri," - " module=%s)" % - ( + " module=%s)" + % ( node.name, callable_name, - node.parsed_attributes.get( - 'module', 'None') + node.parsed_attributes.get("module", "None"), ) ) else: self.printer.writeline( "ns = runtime.Namespace(%r," " context._clean_inheritance_tokens()," - " callables=%s, calling_uri=_template_uri)" % - ( - node.name, - callable_name, - ) + " callables=%s, calling_uri=_template_uri)" + % (node.name, callable_name) ) - if eval(node.attributes.get('inheritable', "False")): + if eval(node.attributes.get("inheritable", "False")): self.printer.writeline("context['self'].%s = ns" % (node.name)) self.printer.writeline( - "context.namespaces[(__name__, %s)] = ns" % repr(node.name)) + "context.namespaces[(__name__, %s)] = ns" % repr(node.name) + ) self.printer.write_blanks(1) if not len(namespaces): self.printer.writeline("pass") @@ -468,7 +491,8 @@ class _GenerateRenderMethod(object): # write closure functions for closures that we define # right here to_write = to_write.union( - [c.funcname for c in identifiers.closuredefs.values()]) + [c.funcname for c in identifiers.closuredefs.values()] + ) # remove identifiers that are declared in the argument # signature of the callable @@ -492,23 +516,22 @@ class _GenerateRenderMethod(object): if limit is not None: to_write = to_write.intersection(limit) - if toplevel and getattr(self.compiler, 'has_ns_imports', False): + if toplevel and 
getattr(self.compiler, "has_ns_imports", False): self.printer.writeline("_import_ns = {}") self.compiler.has_imports = True for ident, ns in self.compiler.namespaces.items(): - if 'import' in ns.attributes: + if "import" in ns.attributes: self.printer.writeline( "_mako_get_namespace(context, %r)." - "_populate(_import_ns, %r)" % - ( + "_populate(_import_ns, %r)" + % ( ident, - re.split(r'\s*,\s*', ns.attributes['import']) - )) + re.split(r"\s*,\s*", ns.attributes["import"]), + ) + ) if has_loop: - self.printer.writeline( - 'loop = __M_loop = runtime.LoopStack()' - ) + self.printer.writeline("loop = __M_loop = runtime.LoopStack()") for ident in to_write: if ident in comp_idents: @@ -526,37 +549,36 @@ class _GenerateRenderMethod(object): elif ident in self.compiler.namespaces: self.printer.writeline( - "%s = _mako_get_namespace(context, %r)" % - (ident, ident) + "%s = _mako_get_namespace(context, %r)" % (ident, ident) ) else: - if getattr(self.compiler, 'has_ns_imports', False): + if getattr(self.compiler, "has_ns_imports", False): if self.compiler.strict_undefined: self.printer.writelines( - "%s = _import_ns.get(%r, UNDEFINED)" % - (ident, ident), + "%s = _import_ns.get(%r, UNDEFINED)" + % (ident, ident), "if %s is UNDEFINED:" % ident, "try:", "%s = context[%r]" % (ident, ident), "except KeyError:", - "raise NameError(\"'%s' is not defined\")" % - ident, - None, None + "raise NameError(\"'%s' is not defined\")" % ident, + None, + None, ) else: self.printer.writeline( "%s = _import_ns.get" - "(%r, context.get(%r, UNDEFINED))" % - (ident, ident, ident)) + "(%r, context.get(%r, UNDEFINED))" + % (ident, ident, ident) + ) else: if self.compiler.strict_undefined: self.printer.writelines( "try:", "%s = context[%r]" % (ident, ident), "except KeyError:", - "raise NameError(\"'%s' is not defined\")" % - ident, - None + "raise NameError(\"'%s' is not defined\")" % ident, + None, ) else: self.printer.writeline( @@ -572,14 +594,16 @@ class _GenerateRenderMethod(object): nameargs = node.get_argument_expressions(as_call=True) if not self.in_def and ( - len(self.identifiers.locally_assigned) > 0 or - len(self.identifiers.argument_declared) > 0): - nameargs.insert(0, 'context._locals(__M_locals)') + len(self.identifiers.locally_assigned) > 0 + or len(self.identifiers.argument_declared) > 0 + ): + nameargs.insert(0, "context._locals(__M_locals)") else: - nameargs.insert(0, 'context') + nameargs.insert(0, "context") self.printer.writeline("def %s(%s):" % (funcname, ",".join(namedecls))) self.printer.writeline( - "return render_%s(%s)" % (funcname, ",".join(nameargs))) + "return render_%s(%s)" % (funcname, ",".join(nameargs)) + ) self.printer.writeline(None) def write_inline_def(self, node, identifiers, nested): @@ -590,21 +614,21 @@ class _GenerateRenderMethod(object): decorator = node.decorator if decorator: self.printer.writeline( - "@runtime._decorate_inline(context, %s)" % decorator) + "@runtime._decorate_inline(context, %s)" % decorator + ) self.printer.writeline( - "def %s(%s):" % (node.funcname, ",".join(namedecls))) + "def %s(%s):" % (node.funcname, ",".join(namedecls)) + ) filtered = len(node.filter_args.args) > 0 - buffered = eval(node.attributes.get('buffered', 'False')) - cached = eval(node.attributes.get('cached', 'False')) + buffered = eval(node.attributes.get("buffered", "False")) + cached = eval(node.attributes.get("cached", "False")) self.printer.writelines( # push new frame, assign current frame to __M_caller "__M_caller = context.caller_stack._push_frame()", - "try:" + "try:", ) if buffered 
or filtered or cached: - self.printer.writelines( - "context._push_buffer()", - ) + self.printer.writelines("context._push_buffer()") identifiers = identifiers.branch(node, nested=nested) @@ -618,12 +642,19 @@ class _GenerateRenderMethod(object): self.write_def_finish(node, buffered, filtered, cached) self.printer.writeline(None) if cached: - self.write_cache_decorator(node, node.funcname, - namedecls, False, identifiers, - inline=True, toplevel=False) + self.write_cache_decorator( + node, + node.funcname, + namedecls, + False, + identifiers, + inline=True, + toplevel=False, + ) - def write_def_finish(self, node, buffered, filtered, cached, - callstack=True): + def write_def_finish( + self, node, buffered, filtered, cached, callstack=True + ): """write the end section of a rendering function, either outermost or inline. @@ -636,9 +667,7 @@ class _GenerateRenderMethod(object): self.printer.writeline("return ''") if callstack: self.printer.writelines( - "finally:", - "context.caller_stack._pop_frame()", - None + "finally:", "context.caller_stack._pop_frame()", None ) if buffered or filtered or cached: @@ -648,13 +677,12 @@ class _GenerateRenderMethod(object): # implemenation might be using a context with no # extra buffers self.printer.writelines( - "finally:", - "__M_buf = context._pop_buffer()" + "finally:", "__M_buf = context._pop_buffer()" ) else: self.printer.writelines( "finally:", - "__M_buf, __M_writer = context._pop_buffer_and_writer()" + "__M_buf, __M_writer = context._pop_buffer_and_writer()", ) if callstack: @@ -662,89 +690,100 @@ class _GenerateRenderMethod(object): s = "__M_buf.getvalue()" if filtered: - s = self.create_filter_callable(node.filter_args.args, s, - False) + s = self.create_filter_callable( + node.filter_args.args, s, False + ) self.printer.writeline(None) if buffered and not cached: - s = self.create_filter_callable(self.compiler.buffer_filters, - s, False) + s = self.create_filter_callable( + self.compiler.buffer_filters, s, False + ) if buffered or cached: self.printer.writeline("return %s" % s) else: - self.printer.writelines( - "__M_writer(%s)" % s, - "return ''" - ) - - def write_cache_decorator(self, node_or_pagetag, name, - args, buffered, identifiers, - inline=False, toplevel=False): + self.printer.writelines("__M_writer(%s)" % s, "return ''") + + def write_cache_decorator( + self, + node_or_pagetag, + name, + args, + buffered, + identifiers, + inline=False, + toplevel=False, + ): """write a post-function decorator to replace a rendering callable with a cached version of itself.""" self.printer.writeline("__M_%s = %s" % (name, name)) - cachekey = node_or_pagetag.parsed_attributes.get('cache_key', - repr(name)) + cachekey = node_or_pagetag.parsed_attributes.get( + "cache_key", repr(name) + ) cache_args = {} if self.compiler.pagetag is not None: cache_args.update( - ( - pa[6:], - self.compiler.pagetag.parsed_attributes[pa] - ) + (pa[6:], self.compiler.pagetag.parsed_attributes[pa]) for pa in self.compiler.pagetag.parsed_attributes - if pa.startswith('cache_') and pa != 'cache_key' + if pa.startswith("cache_") and pa != "cache_key" ) cache_args.update( - ( - pa[6:], - node_or_pagetag.parsed_attributes[pa] - ) for pa in node_or_pagetag.parsed_attributes - if pa.startswith('cache_') and pa != 'cache_key' + (pa[6:], node_or_pagetag.parsed_attributes[pa]) + for pa in node_or_pagetag.parsed_attributes + if pa.startswith("cache_") and pa != "cache_key" ) - if 'timeout' in cache_args: - cache_args['timeout'] = int(eval(cache_args['timeout'])) + if "timeout" in 
cache_args: + cache_args["timeout"] = int(eval(cache_args["timeout"])) - self.printer.writeline("def %s(%s):" % (name, ','.join(args))) + self.printer.writeline("def %s(%s):" % (name, ",".join(args))) # form "arg1, arg2, arg3=arg3, arg4=arg4", etc. pass_args = [ - "%s=%s" % ((a.split('=')[0],) * 2) if '=' in a else a - for a in args + "%s=%s" % ((a.split("=")[0],) * 2) if "=" in a else a for a in args ] self.write_variable_declares( identifiers, toplevel=toplevel, - limit=node_or_pagetag.undeclared_identifiers() + limit=node_or_pagetag.undeclared_identifiers(), ) if buffered: - s = "context.get('local')."\ - "cache._ctx_get_or_create("\ - "%s, lambda:__M_%s(%s), context, %s__M_defname=%r)" % ( - cachekey, name, ','.join(pass_args), - ''.join(["%s=%s, " % (k, v) - for k, v in cache_args.items()]), - name + s = ( + "context.get('local')." + "cache._ctx_get_or_create(" + "%s, lambda:__M_%s(%s), context, %s__M_defname=%r)" + % ( + cachekey, + name, + ",".join(pass_args), + "".join( + ["%s=%s, " % (k, v) for k, v in cache_args.items()] + ), + name, ) + ) # apply buffer_filters - s = self.create_filter_callable(self.compiler.buffer_filters, s, - False) + s = self.create_filter_callable( + self.compiler.buffer_filters, s, False + ) self.printer.writelines("return " + s, None) else: self.printer.writelines( "__M_writer(context.get('local')." "cache._ctx_get_or_create(" - "%s, lambda:__M_%s(%s), context, %s__M_defname=%r))" % - ( - cachekey, name, ','.join(pass_args), - ''.join(["%s=%s, " % (k, v) - for k, v in cache_args.items()]), + "%s, lambda:__M_%s(%s), context, %s__M_defname=%r))" + % ( + cachekey, + name, + ",".join(pass_args), + "".join( + ["%s=%s, " % (k, v) for k, v in cache_args.items()] + ), name, ), "return ''", - None + None, ) def create_filter_callable(self, args, target, is_expression): @@ -753,14 +792,14 @@ class _GenerateRenderMethod(object): 'default' filter aliases as needed.""" def locate_encode(name): - if re.match(r'decode\..+', name): + if re.match(r"decode\..+", name): return "filters." 
+ name elif self.compiler.disable_unicode: return filters.NON_UNICODE_ESCAPES.get(name, name) else: return filters.DEFAULT_ESCAPES.get(name, name) - if 'n' not in args: + if "n" not in args: if is_expression: if self.compiler.pagetag: args = self.compiler.pagetag.filter_args.args + args @@ -768,9 +807,9 @@ class _GenerateRenderMethod(object): args = self.compiler.default_filters + args for e in args: # if filter given as a function, get just the identifier portion - if e == 'n': + if e == "n": continue - m = re.match(r'(.+?)(\(.*\))', e) + m = re.match(r"(.+?)(\(.*\))", e) if m: ident, fargs = m.group(1, 2) f = locate_encode(ident) @@ -783,15 +822,18 @@ class _GenerateRenderMethod(object): def visitExpression(self, node): self.printer.start_source(node.lineno) - if len(node.escapes) or \ - ( - self.compiler.pagetag is not None and - len(self.compiler.pagetag.filter_args.args) - ) or \ - len(self.compiler.default_filters): - - s = self.create_filter_callable(node.escapes_code.args, - "%s" % node.text, True) + if ( + len(node.escapes) + or ( + self.compiler.pagetag is not None + and len(self.compiler.pagetag.filter_args.args) + ) + or len(self.compiler.default_filters) + ): + + s = self.create_filter_callable( + node.escapes_code.args, "%s" % node.text, True + ) self.printer.writeline("__M_writer(%s)" % s) else: self.printer.writeline("__M_writer(%s)" % node.text) @@ -800,12 +842,12 @@ class _GenerateRenderMethod(object): if node.isend: self.printer.writeline(None) if node.has_loop_context: - self.printer.writeline('finally:') + self.printer.writeline("finally:") self.printer.writeline("loop = __M_loop._exit()") self.printer.writeline(None) else: self.printer.start_source(node.lineno) - if self.compiler.enable_loop and node.keyword == 'for': + if self.compiler.enable_loop and node.keyword == "for": text = mangle_mako_loop(node, self.printer) else: text = node.text @@ -817,12 +859,16 @@ class _GenerateRenderMethod(object): # and end control lines, and # 3) any control line with no content other than comments if not children or ( - compat.all(isinstance(c, (parsetree.Comment, - parsetree.ControlLine)) - for c in children) and - compat.all((node.is_ternary(c.keyword) or c.isend) - for c in children - if isinstance(c, parsetree.ControlLine))): + compat.all( + isinstance(c, (parsetree.Comment, parsetree.ControlLine)) + for c in children + ) + and compat.all( + (node.is_ternary(c.keyword) or c.isend) + for c in children + if isinstance(c, parsetree.ControlLine) + ) + ): self.printer.writeline("pass") def visitText(self, node): @@ -833,8 +879,7 @@ class _GenerateRenderMethod(object): filtered = len(node.filter_args.args) > 0 if filtered: self.printer.writelines( - "__M_writer = context._push_writer()", - "try:", + "__M_writer = context._push_writer()", "try:" ) for n in node.nodes: n.accept_visitor(self) @@ -842,12 +887,11 @@ class _GenerateRenderMethod(object): self.printer.writelines( "finally:", "__M_buf, __M_writer = context._pop_buffer_and_writer()", - "__M_writer(%s)" % - self.create_filter_callable( - node.filter_args.args, - "__M_buf.getvalue()", - False), - None + "__M_writer(%s)" + % self.create_filter_callable( + node.filter_args.args, "__M_buf.getvalue()", False + ), + None, ) def visitCode(self, node): @@ -861,24 +905,28 @@ class _GenerateRenderMethod(object): # which is used for def calls within the same template, # to simulate "enclosing scope" self.printer.writeline( - '__M_locals_builtin_stored = __M_locals_builtin()') + "__M_locals_builtin_stored = __M_locals_builtin()" + ) 
self.printer.writeline( - '__M_locals.update(__M_dict_builtin([(__M_key,' - ' __M_locals_builtin_stored[__M_key]) for __M_key in' - ' [%s] if __M_key in __M_locals_builtin_stored]))' % - ','.join([repr(x) for x in node.declared_identifiers()])) + "__M_locals.update(__M_dict_builtin([(__M_key," + " __M_locals_builtin_stored[__M_key]) for __M_key in" + " [%s] if __M_key in __M_locals_builtin_stored]))" + % ",".join([repr(x) for x in node.declared_identifiers()]) + ) def visitIncludeTag(self, node): self.printer.start_source(node.lineno) - args = node.attributes.get('args') + args = node.attributes.get("args") if args: self.printer.writeline( - "runtime._include_file(context, %s, _template_uri, %s)" % - (node.parsed_attributes['file'], args)) + "runtime._include_file(context, %s, _template_uri, %s)" + % (node.parsed_attributes["file"], args) + ) else: self.printer.writeline( - "runtime._include_file(context, %s, _template_uri)" % - (node.parsed_attributes['file'])) + "runtime._include_file(context, %s, _template_uri)" + % (node.parsed_attributes["file"]) + ) def visitNamespaceTag(self, node): pass @@ -891,13 +939,14 @@ class _GenerateRenderMethod(object): self.printer.writeline("%s()" % node.funcname) else: nameargs = node.get_argument_expressions(as_call=True) - nameargs += ['**pageargs'] + nameargs += ["**pageargs"] self.printer.writeline( "if 'parent' not in context._data or " - "not hasattr(context._data['parent'], '%s'):" - % node.funcname) + "not hasattr(context._data['parent'], '%s'):" % node.funcname + ) self.printer.writeline( - "context['self'].%s(%s)" % (node.funcname, ",".join(nameargs))) + "context['self'].%s(%s)" % (node.funcname, ",".join(nameargs)) + ) self.printer.writeline("\n") def visitCallNamespaceTag(self, node): @@ -908,19 +957,18 @@ class _GenerateRenderMethod(object): def visitCallTag(self, node): self.printer.writeline("def ccall(caller):") - export = ['body'] + export = ["body"] callable_identifiers = self.identifiers.branch(node, nested=True) body_identifiers = callable_identifiers.branch(node, nested=False) # we want the 'caller' passed to ccall to be used # for the body() function, but for other non-body() # <%def>s within <%call> we want the current caller # off the call stack (if any) - body_identifiers.add_declared('caller') + body_identifiers.add_declared("caller") self.identifier_stack.append(body_identifiers) class DefVisitor(object): - def visitDefTag(s, node): s.visitDefOrBase(node) @@ -942,16 +990,13 @@ class _GenerateRenderMethod(object): self.identifier_stack.pop() bodyargs = node.body_decl.get_argument_expressions() - self.printer.writeline("def body(%s):" % ','.join(bodyargs)) + self.printer.writeline("def body(%s):" % ",".join(bodyargs)) # TODO: figure out best way to specify # buffering/nonbuffering (at call time would be better) buffered = False if buffered: - self.printer.writelines( - "context._push_buffer()", - "try:" - ) + self.printer.writelines("context._push_buffer()", "try:") self.write_variable_declares(body_identifiers) self.identifier_stack.append(body_identifiers) @@ -960,25 +1005,22 @@ class _GenerateRenderMethod(object): self.identifier_stack.pop() self.write_def_finish(node, buffered, False, False, callstack=False) - self.printer.writelines( - None, - "return [%s]" % (','.join(export)), - None - ) + self.printer.writelines(None, "return [%s]" % (",".join(export)), None) self.printer.writelines( # push on caller for nested call "context.caller_stack.nextcaller = " "runtime.Namespace('caller', context, " 
"callables=ccall(__M_caller))", - "try:") + "try:", + ) self.printer.start_source(node.lineno) self.printer.writelines( - "__M_writer(%s)" % self.create_filter_callable( - [], node.expression, True), + "__M_writer(%s)" + % self.create_filter_callable([], node.expression, True), "finally:", "context.caller_stack.nextcaller = None", - None + None, ) @@ -996,10 +1038,12 @@ class _Identifiers(object): else: # things that have already been declared # in an enclosing namespace (i.e. names we can just use) - self.declared = set(parent.declared).\ - union([c.name for c in parent.closuredefs.values()]).\ - union(parent.locally_declared).\ - union(parent.argument_declared) + self.declared = ( + set(parent.declared) + .union([c.name for c in parent.closuredefs.values()]) + .union(parent.locally_declared) + .union(parent.argument_declared) + ) # if these identifiers correspond to a "nested" # scope, it means whatever the parent identifiers @@ -1043,11 +1087,13 @@ class _Identifiers(object): node.accept_visitor(self) illegal_names = self.compiler.reserved_names.intersection( - self.locally_declared) + self.locally_declared + ) if illegal_names: raise exceptions.NameConflictError( - "Reserved words declared in template: %s" % - ", ".join(illegal_names)) + "Reserved words declared in template: %s" + % ", ".join(illegal_names) + ) def branch(self, node, **kwargs): """create a new Identifiers for a new Node, with @@ -1060,24 +1106,28 @@ class _Identifiers(object): return set(self.topleveldefs.union(self.closuredefs).values()) def __repr__(self): - return "Identifiers(declared=%r, locally_declared=%r, "\ - "undeclared=%r, topleveldefs=%r, closuredefs=%r, "\ - "argumentdeclared=%r)" %\ - ( + return ( + "Identifiers(declared=%r, locally_declared=%r, " + "undeclared=%r, topleveldefs=%r, closuredefs=%r, " + "argumentdeclared=%r)" + % ( list(self.declared), list(self.locally_declared), list(self.undeclared), [c.name for c in self.topleveldefs.values()], [c.name for c in self.closuredefs.values()], - self.argument_declared) + self.argument_declared, + ) + ) def check_declared(self, node): """update the state of this Identifiers with the undeclared and declared identifiers of the given node.""" for ident in node.undeclared_identifiers(): - if ident != 'context' and\ - ident not in self.declared.union(self.locally_declared): + if ident != "context" and ident not in self.declared.union( + self.locally_declared + ): self.undeclared.add(ident) for ident in node.declared_identifiers(): self.locally_declared.add(ident) @@ -1097,7 +1147,8 @@ class _Identifiers(object): if not node.ismodule: self.check_declared(node) self.locally_assigned = self.locally_assigned.union( - node.declared_identifiers()) + node.declared_identifiers() + ) def visitNamespaceTag(self, node): # only traverse into the sub-elements of a @@ -1110,13 +1161,16 @@ class _Identifiers(object): def _check_name_exists(self, collection, node): existing = collection.get(node.funcname) collection[node.funcname] = node - if existing is not None and \ - existing is not node and \ - (node.is_block or existing.is_block): + if ( + existing is not None + and existing is not node + and (node.is_block or existing.is_block) + ): raise exceptions.CompileException( "%%def or %%block named '%s' already " - "exists in this template." % - node.funcname, **node.exception_kwargs) + "exists in this template." 
% node.funcname, + **node.exception_kwargs + ) def visitDefTag(self, node): if node.is_root() and not node.is_anonymous: @@ -1125,8 +1179,9 @@ class _Identifiers(object): self._check_name_exists(self.closuredefs, node) for ident in node.undeclared_identifiers(): - if ident != 'context' and \ - ident not in self.declared.union(self.locally_declared): + if ident != "context" and ident not in self.declared.union( + self.locally_declared + ): self.undeclared.add(ident) # visit defs only one level deep @@ -1143,16 +1198,22 @@ class _Identifiers(object): if isinstance(self.node, parsetree.DefTag): raise exceptions.CompileException( "Named block '%s' not allowed inside of def '%s'" - % (node.name, self.node.name), **node.exception_kwargs) - elif isinstance(self.node, - (parsetree.CallTag, parsetree.CallNamespaceTag)): + % (node.name, self.node.name), + **node.exception_kwargs + ) + elif isinstance( + self.node, (parsetree.CallTag, parsetree.CallNamespaceTag) + ): raise exceptions.CompileException( "Named block '%s' not allowed inside of <%%call> tag" - % (node.name, ), **node.exception_kwargs) + % (node.name,), + **node.exception_kwargs + ) for ident in node.undeclared_identifiers(): - if ident != 'context' and \ - ident not in self.declared.union(self.locally_declared): + if ident != "context" and ident not in self.declared.union( + self.locally_declared + ): self.undeclared.add(ident) if not node.is_anonymous: @@ -1167,8 +1228,9 @@ class _Identifiers(object): def visitTextTag(self, node): for ident in node.undeclared_identifiers(): - if ident != 'context' and \ - ident not in self.declared.union(self.locally_declared): + if ident != "context" and ident not in self.declared.union( + self.locally_declared + ): self.undeclared.add(ident) def visitIncludeTag(self, node): @@ -1185,9 +1247,9 @@ class _Identifiers(object): def visitCallTag(self, node): if node is self.node: for ident in node.undeclared_identifiers(): - if ident != 'context' and \ - ident not in self.declared.union( - self.locally_declared): + if ident != "context" and ident not in self.declared.union( + self.locally_declared + ): self.undeclared.add(ident) for ident in node.declared_identifiers(): self.argument_declared.add(ident) @@ -1195,15 +1257,15 @@ class _Identifiers(object): n.accept_visitor(self) else: for ident in node.undeclared_identifiers(): - if ident != 'context' and \ - ident not in self.declared.union( - self.locally_declared): + if ident != "context" and ident not in self.declared.union( + self.locally_declared + ): self.undeclared.add(ident) _FOR_LOOP = re.compile( - r'^for\s+((?:\(?)\s*[A-Za-z_][A-Za-z_0-9]*' - r'(?:\s*,\s*(?:[A-Za-z_][A-Za-z0-9_]*),??)*\s*(?:\)?))\s+in\s+(.*):' + r"^for\s+((?:\(?)\s*[A-Za-z_][A-Za-z_0-9]*" + r"(?:\s*,\s*(?:[A-Za-z_][A-Za-z0-9_]*),??)*\s*(?:\)?))\s+in\s+(.*):" ) @@ -1218,11 +1280,11 @@ def mangle_mako_loop(node, printer): match = _FOR_LOOP.match(node.text) if match: printer.writelines( - 'loop = __M_loop._enter(%s)' % match.group(2), - 'try:' + "loop = __M_loop._enter(%s)" % match.group(2), + "try:" # 'with __M_loop(%s) as loop:' % match.group(2) ) - text = 'for %s in loop:' % match.group(1) + text = "for %s in loop:" % match.group(1) else: raise SyntaxError("Couldn't apply loop context: %s" % node.text) else: @@ -1239,7 +1301,7 @@ class LoopVariable(object): self.detected = False def _loop_reference_detected(self, node): - if 'loop' in node.undeclared_identifiers(): + if "loop" in node.undeclared_identifiers(): self.detected = True else: for n in node.get_children(): diff --git 
a/mako/compat.py b/mako/compat.py index 312bbd8..5f9d9ac 100644 --- a/mako/compat.py +++ b/mako/compat.py @@ -1,3 +1,4 @@ +import json # noqa import sys import time @@ -5,9 +6,9 @@ py3k = sys.version_info >= (3, 0) py33 = sys.version_info >= (3, 3) py2k = sys.version_info < (3,) py27 = sys.version_info >= (2, 7) -jython = sys.platform.startswith('java') -win32 = sys.platform.startswith('win') -pypy = hasattr(sys, 'pypy_version_info') +jython = sys.platform.startswith("java") +win32 = sys.platform.startswith("win") +pypy = hasattr(sys, "pypy_version_info") if py3k: # create a "getargspec" from getfullargspec(), which is not deprecated @@ -17,15 +18,16 @@ if py3k: # with that for now. import collections + ArgSpec = collections.namedtuple( - "ArgSpec", - ["args", "varargs", "keywords", "defaults"]) + "ArgSpec", ["args", "varargs", "keywords", "defaults"] + ) from inspect import getfullargspec as inspect_getfullargspec def inspect_getargspec(func): - return ArgSpec( - *inspect_getfullargspec(func)[0:4] - ) + return ArgSpec(*inspect_getfullargspec(func)[0:4]) + + else: from inspect import getargspec as inspect_getargspec # noqa @@ -35,7 +37,8 @@ if py3k: import builtins as compat_builtins from urllib.parse import quote_plus, unquote_plus from html.entities import codepoint2name, name2codepoint - string_types = str, + + string_types = (str,) binary_type = bytes text_type = str @@ -50,8 +53,10 @@ if py3k: def octal(lit): return eval("0o" + lit) + else: import __builtin__ as compat_builtins # noqa + try: from cStringIO import StringIO except: @@ -61,7 +66,8 @@ else: from urllib import quote_plus, unquote_plus # noqa from htmlentitydefs import codepoint2name, name2codepoint # noqa - string_types = basestring, # noqa + + string_types = (basestring,) # noqa binary_type = str text_type = unicode # noqa @@ -80,11 +86,13 @@ if py33: def load_module(module_id, path): return machinery.SourceFileLoader(module_id, path).load_module() + + else: import imp def load_module(module_id, path): - fp = open(path, 'rb') + fp = open(path, "rb") try: return imp.load_source(module_id, path, fp) finally: @@ -92,28 +100,36 @@ else: if py3k: + def reraise(tp, value, tb=None, cause=None): if cause is not None: value.__cause__ = cause if value.__traceback__ is not tb: raise value.with_traceback(tb) raise value + + else: - exec("def reraise(tp, value, tb=None, cause=None):\n" - " raise tp, value, tb\n") + exec( + "def reraise(tp, value, tb=None, cause=None):\n" + " raise tp, value, tb\n" + ) def exception_as(): return sys.exc_info()[1] + try: import threading + if py3k: import _thread as thread else: import thread except ImportError: import dummy_threading as threading # noqa + if py3k: import _dummy_thread as thread else: @@ -127,21 +143,23 @@ else: try: from functools import partial except: + def partial(func, *args, **keywords): def newfunc(*fargs, **fkeywords): newkeywords = keywords.copy() newkeywords.update(fkeywords) return func(*(args + fargs), **newkeywords) + return newfunc -all = all -import json # noqa +all = all # noqa def exception_name(exc): return exc.__class__.__name__ + try: from inspect import CO_VARKEYWORDS, CO_VARARGS @@ -167,17 +185,23 @@ try: return args, varargs, varkw, fn.__defaults__ else: return args, varargs, varkw, fn.func_defaults + + except ImportError: import inspect def inspect_func_args(fn): return inspect.getargspec(fn) + if py3k: - def callable(fn): - return hasattr(fn, '__call__') + # TODO: this has been restored in py3k + def callable(fn): # noqa + return hasattr(fn, "__call__") + + 
else: - callable = callable + callable = callable # noqa ################################################ @@ -186,6 +210,8 @@ else: def with_metaclass(meta, base=object): """Create a base class with a metaclass.""" return meta("%sBase" % meta.__name__, (base,), {}) + + ################################################ @@ -194,7 +220,7 @@ def arg_stringname(func_arg): In Python3.4 a function's args are of _ast.arg type not _ast.name """ - if hasattr(func_arg, 'arg'): + if hasattr(func_arg, "arg"): return func_arg.arg else: return str(func_arg) diff --git a/mako/exceptions.py b/mako/exceptions.py index e2d78bd..7720356 100644 --- a/mako/exceptions.py +++ b/mako/exceptions.py @@ -6,9 +6,11 @@ """exception classes""" -import traceback import sys -from mako import util, compat +import traceback + +from mako import compat +from mako import util class MakoException(Exception): @@ -27,11 +29,10 @@ def _format_filepos(lineno, pos, filename): class CompileException(MakoException): - def __init__(self, message, source, lineno, pos, filename): MakoException.__init__( - self, - message + _format_filepos(lineno, pos, filename)) + self, message + _format_filepos(lineno, pos, filename) + ) self.lineno = lineno self.pos = pos self.filename = filename @@ -39,11 +40,10 @@ class CompileException(MakoException): class SyntaxException(MakoException): - def __init__(self, message, source, lineno, pos, filename): MakoException.__init__( - self, - message + _format_filepos(lineno, pos, filename)) + self, message + _format_filepos(lineno, pos, filename) + ) self.lineno = lineno self.pos = pos self.filename = filename @@ -115,7 +115,7 @@ class RichTraceback(object): # str(Exception(u'\xe6')) work in Python < 2.6 self.message = self.error.args[0] if not isinstance(self.message, compat.text_type): - self.message = compat.text_type(self.message, 'ascii', 'replace') + self.message = compat.text_type(self.message, "ascii", "replace") def _get_reformatted_records(self, records): for rec in records: @@ -151,12 +151,13 @@ class RichTraceback(object): source, and code line from that line number of the template.""" import mako.template + mods = {} rawrecords = traceback.extract_tb(trcback) new_trcback = [] for filename, lineno, function, line in rawrecords: if not line: - line = '' + line = "" try: (line_map, template_lines) = mods[filename] except KeyError: @@ -169,7 +170,7 @@ class RichTraceback(object): # A normal .py file (not a Template) if not compat.py3k: try: - fp = open(filename, 'rb') + fp = open(filename, "rb") encoding = util.parse_encoding(fp) fp.close() except IOError: @@ -177,20 +178,32 @@ class RichTraceback(object): if encoding: line = line.decode(encoding) else: - line = line.decode('ascii', 'replace') - new_trcback.append((filename, lineno, function, line, - None, None, None, None)) + line = line.decode("ascii", "replace") + new_trcback.append( + ( + filename, + lineno, + function, + line, + None, + None, + None, + None, + ) + ) continue template_ln = 1 - source_map = mako.template.ModuleInfo.\ - get_module_source_metadata( - module_source, full_line_map=True) - line_map = source_map['full_line_map'] + mtm = mako.template.ModuleInfo + source_map = mtm.get_module_source_metadata( + module_source, full_line_map=True + ) + line_map = source_map["full_line_map"] - template_lines = [line_ for line_ in - template_source.split("\n")] + template_lines = [ + line_ for line_ in template_source.split("\n") + ] mods[filename] = (line_map, template_lines) template_ln = line_map[lineno - 1] @@ -199,9 +212,18 @@ class 
RichTraceback(object): template_line = template_lines[template_ln - 1] else: template_line = None - new_trcback.append((filename, lineno, function, - line, template_filename, template_ln, - template_line, template_source)) + new_trcback.append( + ( + filename, + lineno, + function, + line, + template_filename, + template_ln, + template_line, + template_source, + ) + ) if not self.source: for l in range(len(new_trcback) - 1, 0, -1): if new_trcback[l][5]: @@ -212,17 +234,17 @@ class RichTraceback(object): if new_trcback: try: # A normal .py file (not a Template) - fp = open(new_trcback[-1][0], 'rb') + fp = open(new_trcback[-1][0], "rb") encoding = util.parse_encoding(fp) if compat.py3k and not encoding: - encoding = 'utf-8' + encoding = "utf-8" fp.seek(0) self.source = fp.read() fp.close() if encoding: self.source = self.source.decode(encoding) except IOError: - self.source = '' + self.source = "" self.lineno = new_trcback[-1][1] return new_trcback @@ -235,7 +257,9 @@ def text_error_template(lookup=None): """ import mako.template - return mako.template.Template(r""" + + return mako.template.Template( + r""" <%page args="error=None, traceback=None"/> <%! from mako.exceptions import RichTraceback @@ -249,7 +273,8 @@ Traceback (most recent call last): ${line | trim} % endfor ${tback.errorname}: ${tback.message} -""") +""" + ) def _install_pygments(): @@ -261,9 +286,10 @@ def _install_pygments(): def _install_fallback(): global syntax_highlight, pygments_html_formatter from mako.filters import html_escape + pygments_html_formatter = None - def syntax_highlight(filename='', language=None): + def syntax_highlight(filename="", language=None): return html_escape @@ -272,6 +298,8 @@ def _install_highlighting(): _install_pygments() except ImportError: _install_fallback() + + _install_highlighting() @@ -289,7 +317,9 @@ def html_error_template(): """ import mako.template - return mako.template.Template(r""" + + return mako.template.Template( + r""" <%! from mako.exceptions import RichTraceback, syntax_highlight,\ pygments_html_formatter @@ -392,5 +422,7 @@ def html_error_template(): % endif -""", output_encoding=sys.getdefaultencoding(), - encoding_errors='htmlentityreplace') +""", + output_encoding=sys.getdefaultencoding(), + encoding_errors="htmlentityreplace", + ) diff --git a/mako/ext/autohandler.py b/mako/ext/autohandler.py index 9d1c911..f262b13 100644 --- a/mako/ext/autohandler.py +++ b/mako/ext/autohandler.py @@ -8,29 +8,29 @@ requires that the TemplateLookup class is used with templates. -usage: +usage:: -<%! - from mako.ext.autohandler import autohandler -%> -<%inherit file="${autohandler(template, context)}"/> + <%! + from mako.ext.autohandler import autohandler + %> + <%inherit file="${autohandler(template, context)}"/> -or with custom autohandler filename: +or with custom autohandler filename:: -<%! - from mako.ext.autohandler import autohandler -%> -<%inherit file="${autohandler(template, context, name='somefilename')}"/> + <%! 
+ from mako.ext.autohandler import autohandler + %> + <%inherit file="${autohandler(template, context, name='somefilename')}"/> """ -import posixpath import os +import posixpath import re -def autohandler(template, context, name='autohandler'): +def autohandler(template, context, name="autohandler"): lookup = context.lookup _template_uri = template.module._template_uri if not lookup.filesystem_checks: @@ -39,13 +39,14 @@ def autohandler(template, context, name='autohandler'): except KeyError: pass - tokens = re.findall(r'([^/]+)', posixpath.dirname(_template_uri)) + [name] + tokens = re.findall(r"([^/]+)", posixpath.dirname(_template_uri)) + [name] while len(tokens): - path = '/' + '/'.join(tokens) + path = "/" + "/".join(tokens) if path != _template_uri and _file_exists(lookup, path): if not lookup.filesystem_checks: return lookup._uri_cache.setdefault( - (autohandler, _template_uri, name), path) + (autohandler, _template_uri, name), path + ) else: return path if len(tokens) == 1: @@ -54,15 +55,16 @@ def autohandler(template, context, name='autohandler'): if not lookup.filesystem_checks: return lookup._uri_cache.setdefault( - (autohandler, _template_uri, name), None) + (autohandler, _template_uri, name), None + ) else: return None def _file_exists(lookup, path): - psub = re.sub(r'^/', '', path) + psub = re.sub(r"^/", "", path) for d in lookup.directories: - if os.path.exists(d + '/' + psub): + if os.path.exists(d + "/" + psub): return True else: return False diff --git a/mako/ext/babelplugin.py b/mako/ext/babelplugin.py index 0b5e84f..e7e93f5 100644 --- a/mako/ext/babelplugin.py +++ b/mako/ext/babelplugin.py @@ -6,18 +6,19 @@ """gettext message extraction via Babel: http://babel.edgewall.org/""" from babel.messages.extract import extract_python + from mako.ext.extract import MessageExtractor class BabelMakoExtractor(MessageExtractor): - def __init__(self, keywords, comment_tags, options): self.keywords = keywords self.options = options self.config = { - 'comment-tags': u' '.join(comment_tags), - 'encoding': options.get('input_encoding', - options.get('encoding', None)), + "comment-tags": u" ".join(comment_tags), + "encoding": options.get( + "input_encoding", options.get("encoding", None) + ), } super(BabelMakoExtractor, self).__init__() @@ -25,12 +26,19 @@ class BabelMakoExtractor(MessageExtractor): return self.process_file(fileobj) def process_python(self, code, code_lineno, translator_strings): - comment_tags = self.config['comment-tags'] - for lineno, funcname, messages, python_translator_comments \ - in extract_python(code, - self.keywords, comment_tags, self.options): - yield (code_lineno + (lineno - 1), funcname, messages, - translator_strings + python_translator_comments) + comment_tags = self.config["comment-tags"] + for ( + lineno, + funcname, + messages, + python_translator_comments, + ) in extract_python(code, self.keywords, comment_tags, self.options): + yield ( + code_lineno + (lineno - 1), + funcname, + messages, + translator_strings + python_translator_comments, + ) def extract(fileobj, keywords, comment_tags, options): diff --git a/mako/ext/beaker_cache.py b/mako/ext/beaker_cache.py index c7c260d..ebca8a9 100644 --- a/mako/ext/beaker_cache.py +++ b/mako/ext/beaker_cache.py @@ -1,7 +1,6 @@ """Provide a :class:`.CacheImpl` for the Beaker caching system.""" from mako import exceptions - from mako.cache import CacheImpl try: @@ -27,36 +26,37 @@ class BeakerCacheImpl(CacheImpl): def __init__(self, cache): if not has_beaker: raise exceptions.RuntimeException( - "Can't initialize 
Beaker plugin; Beaker is not installed.") + "Can't initialize Beaker plugin; Beaker is not installed." + ) global _beaker_cache if _beaker_cache is None: - if 'manager' in cache.template.cache_args: - _beaker_cache = cache.template.cache_args['manager'] + if "manager" in cache.template.cache_args: + _beaker_cache = cache.template.cache_args["manager"] else: _beaker_cache = beaker_cache.CacheManager() super(BeakerCacheImpl, self).__init__(cache) def _get_cache(self, **kw): - expiretime = kw.pop('timeout', None) - if 'dir' in kw: - kw['data_dir'] = kw.pop('dir') + expiretime = kw.pop("timeout", None) + if "dir" in kw: + kw["data_dir"] = kw.pop("dir") elif self.cache.template.module_directory: - kw['data_dir'] = self.cache.template.module_directory + kw["data_dir"] = self.cache.template.module_directory - if 'manager' in kw: - kw.pop('manager') + if "manager" in kw: + kw.pop("manager") - if kw.get('type') == 'memcached': - kw['type'] = 'ext:memcached' + if kw.get("type") == "memcached": + kw["type"] = "ext:memcached" - if 'region' in kw: - region = kw.pop('region') + if "region" in kw: + region = kw.pop("region") cache = _beaker_cache.get_cache_region(self.cache.id, region, **kw) else: cache = _beaker_cache.get_cache(self.cache.id, **kw) - cache_args = {'starttime': self.cache.starttime} + cache_args = {"starttime": self.cache.starttime} if expiretime: - cache_args['expiretime'] = expiretime + cache_args["expiretime"] = expiretime return cache, cache_args def get_or_create(self, key, creation_function, **kw): diff --git a/mako/ext/extract.py b/mako/ext/extract.py index d777ea8..766129b 100644 --- a/mako/ext/extract.py +++ b/mako/ext/extract.py @@ -1,30 +1,33 @@ import re + from mako import compat from mako import lexer from mako import parsetree class MessageExtractor(object): - def process_file(self, fileobj): template_node = lexer.Lexer( - fileobj.read(), - input_encoding=self.config['encoding']).parse() + fileobj.read(), input_encoding=self.config["encoding"] + ).parse() for extracted in self.extract_nodes(template_node.get_children()): yield extracted def extract_nodes(self, nodes): translator_comments = [] in_translator_comments = False - input_encoding = self.config['encoding'] or 'ascii' + input_encoding = self.config["encoding"] or "ascii" comment_tags = list( - filter(None, re.split(r'\s+', self.config['comment-tags']))) + filter(None, re.split(r"\s+", self.config["comment-tags"])) + ) for node in nodes: child_nodes = None - if in_translator_comments and \ - isinstance(node, parsetree.Text) and \ - not node.content.strip(): + if ( + in_translator_comments + and isinstance(node, parsetree.Text) + and not node.content.strip() + ): # Ignore whitespace within translator comments continue @@ -32,13 +35,15 @@ class MessageExtractor(object): value = node.text.strip() if in_translator_comments: translator_comments.extend( - self._split_comment(node.lineno, value)) + self._split_comment(node.lineno, value) + ) continue for comment_tag in comment_tags: if value.startswith(comment_tag): in_translator_comments = True translator_comments.extend( - self._split_comment(node.lineno, value)) + self._split_comment(node.lineno, value) + ) continue if isinstance(node, parsetree.DefTag): @@ -69,15 +74,18 @@ class MessageExtractor(object): continue # Comments don't apply unless they immediately precede the message - if translator_comments and \ - translator_comments[-1][0] < node.lineno - 1: + if ( + translator_comments + and translator_comments[-1][0] < node.lineno - 1 + ): translator_comments = [] 
translator_strings = [ - comment[1] for comment in translator_comments] + comment[1] for comment in translator_comments + ] if isinstance(code, compat.text_type): - code = code.encode(input_encoding, 'backslashreplace') + code = code.encode(input_encoding, "backslashreplace") used_translator_comments = False # We add extra newline to work around a pybabel bug @@ -85,10 +93,11 @@ class MessageExtractor(object): # input string of the input is non-ascii) # Also, because we added it, we have to subtract one from # node.lineno - code = compat.byte_buffer(compat.b('\n') + code) + code = compat.byte_buffer(compat.b("\n") + code) for message in self.process_python( - code, node.lineno - 1, translator_strings): + code, node.lineno - 1, translator_strings + ): yield message used_translator_comments = True @@ -104,5 +113,7 @@ class MessageExtractor(object): def _split_comment(lineno, comment): """Return the multiline comment at lineno split into a list of comment line numbers and the accompanying comment line""" - return [(lineno + index, line) for index, line in - enumerate(comment.splitlines())] + return [ + (lineno + index, line) + for index, line in enumerate(comment.splitlines()) + ] diff --git a/mako/ext/linguaplugin.py b/mako/ext/linguaplugin.py index 46b0d6a..dda3422 100644 --- a/mako/ext/linguaplugin.py +++ b/mako/ext/linguaplugin.py @@ -1,43 +1,51 @@ import io + from lingua.extractors import Extractor -from lingua.extractors import Message from lingua.extractors import get_extractor -from mako.ext.extract import MessageExtractor +from lingua.extractors import Message + from mako import compat +from mako.ext.extract import MessageExtractor class LinguaMakoExtractor(Extractor, MessageExtractor): - '''Mako templates''' - extensions = ['.mako'] - default_config = { - 'encoding': 'utf-8', - 'comment-tags': '', - } + """Mako templates""" + + extensions = [".mako"] + default_config = {"encoding": "utf-8", "comment-tags": ""} def __call__(self, filename, options, fileobj=None): self.options = options self.filename = filename - self.python_extractor = get_extractor('x.py') + self.python_extractor = get_extractor("x.py") if fileobj is None: - fileobj = open(filename, 'rb') + fileobj = open(filename, "rb") return self.process_file(fileobj) def process_python(self, code, code_lineno, translator_strings): source = code.getvalue().strip() - if source.endswith(compat.b(':')): - if source in (compat.b('try:'), compat.b('else:')) or source.startswith(compat.b('except')): - source = compat.b('') # Ignore try/except and else - elif source.startswith(compat.b('elif')): - source = source[2:] # Replace "elif" with "if" - source += compat.b('pass') + if source.endswith(compat.b(":")): + if source in ( + compat.b("try:"), + compat.b("else:"), + ) or source.startswith(compat.b("except")): + source = compat.b("") # Ignore try/except and else + elif source.startswith(compat.b("elif")): + source = source[2:] # Replace "elif" with "if" + source += compat.b("pass") code = io.BytesIO(source) for msg in self.python_extractor( - self.filename, self.options, code, code_lineno -1): + self.filename, self.options, code, code_lineno - 1 + ): if translator_strings: - msg = Message(msg.msgctxt, msg.msgid, msg.msgid_plural, - msg.flags, - compat.u(' ').join( - translator_strings + [msg.comment]), - msg.tcomment, msg.location) + msg = Message( + msg.msgctxt, + msg.msgid, + msg.msgid_plural, + msg.flags, + compat.u(" ").join(translator_strings + [msg.comment]), + msg.tcomment, + msg.location, + ) yield msg diff --git 
a/mako/ext/preprocessors.py b/mako/ext/preprocessors.py
index 9b700d1..524b87f 100644
--- a/mako/ext/preprocessors.py
+++ b/mako/ext/preprocessors.py
@@ -17,4 +17,4 @@ def convert_comments(text):
     from mako.ext.preprocessors import convert_comments
     t = Template(..., preprocessor=convert_comments)"""
-    return re.sub(r'(?<=\n)\s*#[^#]', "##", text)
+    return re.sub(r"(?<=\n)\s*#[^#]", "##", text)
diff --git a/mako/ext/pygmentplugin.py b/mako/ext/pygmentplugin.py
index 4057caa..809e696 100644
--- a/mako/ext/pygmentplugin.py
+++ b/mako/ext/pygmentplugin.py
@@ -4,42 +4,70 @@
 # This module is part of Mako and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php
 
-from pygments.lexers.web import \
-    HtmlLexer, XmlLexer, JavascriptLexer, CssLexer
-from pygments.lexers.agile import PythonLexer, Python3Lexer
-from pygments.lexer import DelegatingLexer, RegexLexer, bygroups, \
-    include, using
-from pygments.token import \
-    Text, Comment, Operator, Keyword, Name, String, Other
-from pygments.formatters.html import HtmlFormatter
 from pygments import highlight
+from pygments.formatters.html import HtmlFormatter
+from pygments.lexer import bygroups
+from pygments.lexer import DelegatingLexer
+from pygments.lexer import include
+from pygments.lexer import RegexLexer
+from pygments.lexer import using
+from pygments.lexers.agile import Python3Lexer
+from pygments.lexers.agile import PythonLexer
+from pygments.lexers.web import CssLexer
+from pygments.lexers.web import HtmlLexer
+from pygments.lexers.web import JavascriptLexer
+from pygments.lexers.web import XmlLexer
+from pygments.token import Comment
+from pygments.token import Keyword
+from pygments.token import Name
+from pygments.token import Operator
+from pygments.token import Other
+from pygments.token import String
+from pygments.token import Text
+
 from mako import compat
 
 
 class MakoLexer(RegexLexer):
-    name = 'Mako'
-    aliases = ['mako']
-    filenames = ['*.mao']
+    name = "Mako"
+    aliases = ["mako"]
+    filenames = ["*.mao"]
 
     tokens = {
-        'root': [
-            (r'(\s*)(\%)(\s*end(?:\w+))(\n|\Z)',
-             bygroups(Text, Comment.Preproc, Keyword, Other)),
-            (r'(\s*)(\%(?!%))([^\n]*)(\n|\Z)',
-             bygroups(Text, Comment.Preproc, using(PythonLexer), Other)),
-            (r'(\s*)(##[^\n]*)(\n|\Z)',
-             bygroups(Text, Comment.Preproc, Other)),
-            (r'''(?s)<%doc>.*?</%doc>''', Comment.Preproc),
-            (r'(<%)([\w\.\:]+)',
-             bygroups(Comment.Preproc, Name.Builtin), 'tag'),
-            (r'(</%)([\w\.\:]+)(>)',
-             bygroups(Comment.Preproc, Name.Builtin, Comment.Preproc)),
-            (r'<%(?=([\w\.\:]+))', Comment.Preproc, 'ondeftags'),
-            (r'(<%(?:!?))(.*?)(%>)(?s)',
-             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
-            (r'(\$\{)(.*?)(\})',
-             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
-            (r'''(?sx)
+        "root": [
+            (
+                r"(\s*)(\%)(\s*end(?:\w+))(\n|\Z)",
+                bygroups(Text, Comment.Preproc, Keyword, Other),
+            ),
+            (
+                r"(\s*)(\%(?!%))([^\n]*)(\n|\Z)",
+                bygroups(Text, Comment.Preproc, using(PythonLexer), Other),
+            ),
+            (
+                r"(\s*)(##[^\n]*)(\n|\Z)",
+                bygroups(Text, Comment.Preproc, Other),
+            ),
+            (r"""(?s)<%doc>.*?</%doc>""", Comment.Preproc),
+            (
+                r"(<%)([\w\.\:]+)",
+                bygroups(Comment.Preproc, Name.Builtin),
+                "tag",
+            ),
+            (
+                r"(</%)([\w\.\:]+)(>)",
+                bygroups(Comment.Preproc, Name.Builtin, Comment.Preproc),
+            ),
+            (r"<%(?=([\w\.\:]+))", Comment.Preproc, "ondeftags"),
+            (
+                r"(<%(?:!?))(.*?)(%>)(?s)",
+                bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc),
+            ),
+            (
+                r"(\$\{)(.*?)(\})",
+                bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc),
+            ),
+            (
+                r"""(?sx)
                 (.+?) 
# anything, followed by: (?: (?<=\n)(?=%(?!%)|\#\#) | # an eval or comment line @@ -52,76 +80,78 @@ class MakoLexer(RegexLexer): (\\\n) | # an escaped newline \Z # end of string ) - ''', bygroups(Other, Operator)), - (r'\s+', Text), + """, + bygroups(Other, Operator), + ), + (r"\s+", Text), ], - 'ondeftags': [ - (r'<%', Comment.Preproc), - (r'(?<=<%)(include|inherit|namespace|page)', Name.Builtin), - include('tag'), + "ondeftags": [ + (r"<%", Comment.Preproc), + (r"(?<=<%)(include|inherit|namespace|page)", Name.Builtin), + include("tag"), ], - 'tag': [ - (r'((?:\w+)\s*=)\s*(".*?")', - bygroups(Name.Attribute, String)), - (r'/?\s*>', Comment.Preproc, '#pop'), - (r'\s+', Text), + "tag": [ + (r'((?:\w+)\s*=)\s*(".*?")', bygroups(Name.Attribute, String)), + (r"/?\s*>", Comment.Preproc, "#pop"), + (r"\s+", Text), ], - 'attr': [ - ('".*?"', String, '#pop'), - ("'.*?'", String, '#pop'), - (r'[^\s>]+', String, '#pop'), + "attr": [ + ('".*?"', String, "#pop"), + ("'.*?'", String, "#pop"), + (r"[^\s>]+", String, "#pop"), ], } class MakoHtmlLexer(DelegatingLexer): - name = 'HTML+Mako' - aliases = ['html+mako'] + name = "HTML+Mako" + aliases = ["html+mako"] def __init__(self, **options): - super(MakoHtmlLexer, self).__init__(HtmlLexer, MakoLexer, - **options) + super(MakoHtmlLexer, self).__init__(HtmlLexer, MakoLexer, **options) class MakoXmlLexer(DelegatingLexer): - name = 'XML+Mako' - aliases = ['xml+mako'] + name = "XML+Mako" + aliases = ["xml+mako"] def __init__(self, **options): - super(MakoXmlLexer, self).__init__(XmlLexer, MakoLexer, - **options) + super(MakoXmlLexer, self).__init__(XmlLexer, MakoLexer, **options) class MakoJavascriptLexer(DelegatingLexer): - name = 'JavaScript+Mako' - aliases = ['js+mako', 'javascript+mako'] + name = "JavaScript+Mako" + aliases = ["js+mako", "javascript+mako"] def __init__(self, **options): - super(MakoJavascriptLexer, self).__init__(JavascriptLexer, - MakoLexer, **options) + super(MakoJavascriptLexer, self).__init__( + JavascriptLexer, MakoLexer, **options + ) class MakoCssLexer(DelegatingLexer): - name = 'CSS+Mako' - aliases = ['css+mako'] + name = "CSS+Mako" + aliases = ["css+mako"] def __init__(self, **options): - super(MakoCssLexer, self).__init__(CssLexer, MakoLexer, - **options) + super(MakoCssLexer, self).__init__(CssLexer, MakoLexer, **options) -pygments_html_formatter = HtmlFormatter(cssclass='syntax-highlighted', - linenos=True) +pygments_html_formatter = HtmlFormatter( + cssclass="syntax-highlighted", linenos=True +) -def syntax_highlight(filename='', language=None): +def syntax_highlight(filename="", language=None): mako_lexer = MakoLexer() if compat.py3k: python_lexer = Python3Lexer() else: python_lexer = PythonLexer() - if filename.startswith('memory:') or language == 'mako': - return lambda string: highlight(string, mako_lexer, - pygments_html_formatter) - return lambda string: highlight(string, python_lexer, - pygments_html_formatter) + if filename.startswith("memory:") or language == "mako": + return lambda string: highlight( + string, mako_lexer, pygments_html_formatter + ) + return lambda string: highlight( + string, python_lexer, pygments_html_formatter + ) diff --git a/mako/ext/turbogears.py b/mako/ext/turbogears.py index eaa2d78..ee1147d 100644 --- a/mako/ext/turbogears.py +++ b/mako/ext/turbogears.py @@ -13,7 +13,7 @@ class TGPlugin(object): """TurboGears compatible Template Plugin.""" - def __init__(self, extra_vars_func=None, options=None, extension='mak'): + def __init__(self, extra_vars_func=None, options=None, extension="mak"): 
self.extra_vars_func = extra_vars_func
         self.extension = extension
         if not options:
@@ -22,9 +22,9 @@ class TGPlugin(object):
         # Pull the options out and initialize the lookup
         lookup_options = {}
         for k, v in options.items():
-            if k.startswith('mako.'):
+            if k.startswith("mako."):
                 lookup_options[k[5:]] = v
-            elif k in ['directories', 'filesystem_checks', 'module_directory']:
+            elif k in ["directories", "filesystem_checks", "module_directory"]:
                 lookup_options[k] = v
         self.lookup = TemplateLookup(**lookup_options)
 
@@ -40,14 +40,17 @@ class TGPlugin(object):
         if template_string is not None:
             return Template(template_string, **self.tmpl_options)
         # Translate TG dot notation to normal / template path
-        if '/' not in templatename:
-            templatename = '/' + templatename.replace('.', '/') + '.' +\
-                self.extension
+        if "/" not in templatename:
+            templatename = (
+                "/" + templatename.replace(".", "/") + "." + self.extension
+            )
 
         # Lookup template
         return self.lookup.get_template(templatename)
 
-    def render(self, info, format="html", fragment=False, template=None):
+    def render(
+        self, info, format="html", fragment=False, template=None  # noqa
+    ):
         if isinstance(template, compat.string_types):
             template = self.load_template(template)
diff --git a/mako/filters.py b/mako/filters.py
index c082690..56d1d73 100644
--- a/mako/filters.py
+++ b/mako/filters.py
@@ -5,20 +5,21 @@
 
 
-import re
 import codecs
-
-from mako.compat import quote_plus, unquote_plus, codepoint2name, \
-    name2codepoint
+import re
 
 from mako import compat
+from mako.compat import codepoint2name
+from mako.compat import name2codepoint
+from mako.compat import quote_plus
+from mako.compat import unquote_plus
 
 xml_escapes = {
-    '&': '&amp;',
-    '>': '&gt;',
-    '<': '&lt;',
-    '"': '&#34;',   # also &quot; in html-only
-    "'": '&#39;'    # also &apos; in html-only
+    "&": "&amp;",
+    ">": "&gt;",
+    "<": "&lt;",
+    '"': "&#34;",  # also &quot; in html-only
+    "'": "&#39;",  # also &apos; in html-only
 }
 
 # XXX: &quot; is valid in HTML and XML
 
 def legacy_html_escape(s):
@@ -37,6 +38,7 @@ def legacy_html_escape(s):
 
 try:
     import markupsafe
+
     html_escape = markupsafe.escape
 except ImportError:
     html_escape = legacy_html_escape
@@ -69,7 +71,6 @@ def trim(string):
 
 
 class Decode(object):
-
     def __getattr__(self, key):
         def decode(x):
             if isinstance(x, compat.text_type):
@@ -78,24 +79,31 @@ class Decode(object):
                 return decode(str(x))
             else:
                 return compat.text_type(x, encoding=key)
+
         return decode
+
+
 decode = Decode()
 
-_ASCII_re = re.compile(r'\A[\x00-\x7f]*\Z')
+_ASCII_re = re.compile(r"\A[\x00-\x7f]*\Z")
 
 
 def is_ascii_str(text):
     return isinstance(text, str) and _ASCII_re.match(text)
 
+
 ################################################################
 
 
 class XMLEntityEscaper(object):
-
     def __init__(self, codepoint2name, name2codepoint):
-        self.codepoint2entity = dict([(c, compat.text_type('&%s;' % n))
-                                      for c, n in codepoint2name.items()])
+        self.codepoint2entity = dict(
+            [
+                (c, compat.text_type("&%s;" % n))
+                for c, n in codepoint2name.items()
+            ]
+        )
         self.name2codepoint = name2codepoint
 
     def escape_entities(self, text):
@@ -110,7 +118,7 @@ class XMLEntityEscaper(object):
         try:
             return self.codepoint2entity[codepoint]
         except (KeyError, IndexError):
-            return '&#x%X;' % codepoint
+            return "&#x%X;" % codepoint
 
     __escapable = re.compile(r'["&<>]|[^\x00-\x7f]')
 
@@ -123,19 +131,22 @@ class XMLEntityEscaper(object):
 
         The return value is guaranteed to be ASCII.
""" - return self.__escapable.sub(self.__escape, compat.text_type(text) - ).encode('ascii') + return self.__escapable.sub( + self.__escape, compat.text_type(text) + ).encode("ascii") # XXX: This regexp will not match all valid XML entity names__. # (It punts on details involving involving CombiningChars and Extenders.) # # .. __: http://www.w3.org/TR/2000/REC-xml-20001006#NT-EntityRef - __characterrefs = re.compile(r'''& (?: + __characterrefs = re.compile( + r"""& (?: \#(\d+) | \#x([\da-f]+) | ( (?!\d) [:\w] [-.:\w]+ ) - ) ;''', - re.X | re.UNICODE) + ) ;""", + re.X | re.UNICODE, + ) def __unescape(self, m): dval, hval, name = m.groups() @@ -144,7 +155,7 @@ class XMLEntityEscaper(object): elif hval: codepoint = int(hval, 16) else: - codepoint = self.name2codepoint.get(name, 0xfffd) + codepoint = self.name2codepoint.get(name, 0xFFFD) # U+FFFD = "REPLACEMENT CHARACTER" if codepoint < 128: return chr(codepoint) @@ -168,42 +179,41 @@ html_entities_unescape = _html_entities_escaper.unescape def htmlentityreplace_errors(ex): """An encoding error handler. - This python `codecs`_ error handler replaces unencodable + This python codecs error handler replaces unencodable characters with HTML entities, or, if no HTML entity exists for - the character, XML character references. + the character, XML character references:: - >>> u'The cost was \u20ac12.'.encode('latin1', 'htmlentityreplace') - 'The cost was €12.' + >>> u'The cost was \u20ac12.'.encode('latin1', 'htmlentityreplace') + 'The cost was €12.' """ if isinstance(ex, UnicodeEncodeError): # Handle encoding errors - bad_text = ex.object[ex.start:ex.end] + bad_text = ex.object[ex.start : ex.end] text = _html_entities_escaper.escape(bad_text) return (compat.text_type(text), ex.end) raise ex -codecs.register_error('htmlentityreplace', htmlentityreplace_errors) + +codecs.register_error("htmlentityreplace", htmlentityreplace_errors) # TODO: options to make this dynamic per-compilation will be added in a later # release DEFAULT_ESCAPES = { - 'x': 'filters.xml_escape', - 'h': 'filters.html_escape', - 'u': 'filters.url_escape', - 'trim': 'filters.trim', - 'entity': 'filters.html_entities_escape', - 'unicode': 'unicode', - 'decode': 'decode', - 'str': 'str', - 'n': 'n' + "x": "filters.xml_escape", + "h": "filters.html_escape", + "u": "filters.url_escape", + "trim": "filters.trim", + "entity": "filters.html_entities_escape", + "unicode": "unicode", + "decode": "decode", + "str": "str", + "n": "n", } if compat.py3k: - DEFAULT_ESCAPES.update({ - 'unicode': 'str' - }) + DEFAULT_ESCAPES.update({"unicode": "str"}) NON_UNICODE_ESCAPES = DEFAULT_ESCAPES.copy() -NON_UNICODE_ESCAPES['h'] = 'filters.legacy_html_escape' -NON_UNICODE_ESCAPES['u'] = 'filters.legacy_url_escape' +NON_UNICODE_ESCAPES["h"] = "filters.legacy_html_escape" +NON_UNICODE_ESCAPES["u"] = "filters.legacy_url_escape" diff --git a/mako/lexer.py b/mako/lexer.py index cf4187f..e11a949 100644 --- a/mako/lexer.py +++ b/mako/lexer.py @@ -6,19 +6,26 @@ """provides the Lexer class for parsing template strings into parse trees.""" -import re import codecs -from mako import parsetree, exceptions, compat +import re + +from mako import compat +from mako import exceptions +from mako import parsetree from mako.pygen import adjust_whitespace _regexp_cache = {} class Lexer(object): - - def __init__(self, text, filename=None, - disable_unicode=False, - input_encoding=None, preprocessor=None): + def __init__( + self, + text, + filename=None, + disable_unicode=False, + input_encoding=None, + preprocessor=None, + ): 
self.text = text self.filename = filename self.template = parsetree.TemplateNode(self.filename) @@ -34,22 +41,24 @@ class Lexer(object): if compat.py3k and disable_unicode: raise exceptions.UnsupportedError( - "Mako for Python 3 does not " - "support disabling Unicode") + "Mako for Python 3 does not " "support disabling Unicode" + ) if preprocessor is None: self.preprocessor = [] - elif not hasattr(preprocessor, '__iter__'): + elif not hasattr(preprocessor, "__iter__"): self.preprocessor = [preprocessor] else: self.preprocessor = preprocessor @property def exception_kwargs(self): - return {'source': self.text, - 'lineno': self.matched_lineno, - 'pos': self.matched_charpos, - 'filename': self.filename} + return { + "source": self.text, + "lineno": self.matched_lineno, + "pos": self.matched_charpos, + "filename": self.filename, + } def match(self, regexp, flags=None): """compile the given regexp, cache the reg, and call match_reg().""" @@ -83,9 +92,9 @@ class Lexer(object): else: self.match_position = end self.matched_lineno = self.lineno - lines = re.findall(r"\n", self.text[mp:self.match_position]) + lines = re.findall(r"\n", self.text[mp : self.match_position]) cp = mp - 1 - while (cp >= 0 and cp < self.textlength and self.text[cp] != '\n'): + while cp >= 0 and cp < self.textlength and self.text[cp] != "\n": cp -= 1 self.matched_charpos = mp - cp self.lineno += len(lines) @@ -97,46 +106,49 @@ class Lexer(object): def parse_until_text(self, watch_nesting, *text): startpos = self.match_position - text_re = r'|'.join(text) + text_re = r"|".join(text) brace_level = 0 paren_level = 0 bracket_level = 0 while True: - match = self.match(r'#.*\n') + match = self.match(r"#.*\n") if match: continue - match = self.match(r'(\"\"\"|\'\'\'|\"|\')[^\\]*?(\\.[^\\]*?)*\1', - re.S) + match = self.match( + r"(\"\"\"|\'\'\'|\"|\')[^\\]*?(\\.[^\\]*?)*\1", re.S + ) if match: continue - match = self.match(r'(%s)' % text_re) - if match and not (watch_nesting - and (brace_level > 0 or paren_level > 0 - or bracket_level > 0)): - return \ - self.text[startpos: - self.match_position - len(match.group(1))],\ - match.group(1) + match = self.match(r"(%s)" % text_re) + if match and not ( + watch_nesting + and (brace_level > 0 or paren_level > 0 or bracket_level > 0) + ): + return ( + self.text[ + startpos : self.match_position - len(match.group(1)) + ], + match.group(1), + ) elif not match: match = self.match(r"(.*?)(?=\"|\'|#|%s)" % text_re, re.S) if match: - brace_level += match.group(1).count('{') - brace_level -= match.group(1).count('}') - paren_level += match.group(1).count('(') - paren_level -= match.group(1).count(')') - bracket_level += match.group(1).count('[') - bracket_level -= match.group(1).count(']') + brace_level += match.group(1).count("{") + brace_level -= match.group(1).count("}") + paren_level += match.group(1).count("(") + paren_level -= match.group(1).count(")") + bracket_level += match.group(1).count("[") + bracket_level -= match.group(1).count("]") continue raise exceptions.SyntaxException( - "Expected: %s" % - ','.join(text), - **self.exception_kwargs) + "Expected: %s" % ",".join(text), **self.exception_kwargs + ) def append_node(self, nodecls, *args, **kwargs): - kwargs.setdefault('source', self.text) - kwargs.setdefault('lineno', self.matched_lineno) - kwargs.setdefault('pos', self.matched_charpos) - kwargs['filename'] = self.filename + kwargs.setdefault("source", self.text) + kwargs.setdefault("lineno", self.matched_lineno) + kwargs.setdefault("pos", self.matched_charpos) + kwargs["filename"] 
= self.filename node = nodecls(*args, **kwargs) if len(self.tag): self.tag[-1].nodes.append(node) @@ -149,8 +161,10 @@ class Lexer(object): if self.control_line: control_frame = self.control_line[-1] control_frame.nodes.append(node) - if not (isinstance(node, parsetree.ControlLine) and - control_frame.is_ternary(node.keyword)): + if not ( + isinstance(node, parsetree.ControlLine) + and control_frame.is_ternary(node.keyword) + ): if self.ternary_stack and self.ternary_stack[-1]: self.ternary_stack[-1][-1].nodes.append(node) if isinstance(node, parsetree.Tag): @@ -164,17 +178,20 @@ class Lexer(object): elif node.is_primary: self.control_line.append(node) self.ternary_stack.append([]) - elif self.control_line and \ - self.control_line[-1].is_ternary(node.keyword): + elif self.control_line and self.control_line[-1].is_ternary( + node.keyword + ): self.ternary_stack[-1].append(node) - elif self.control_line and \ - not self.control_line[-1].is_ternary(node.keyword): + elif self.control_line and not self.control_line[-1].is_ternary( + node.keyword + ): raise exceptions.SyntaxException( - "Keyword '%s' not a legal ternary for keyword '%s'" % - (node.keyword, self.control_line[-1].keyword), - **self.exception_kwargs) + "Keyword '%s' not a legal ternary for keyword '%s'" + % (node.keyword, self.control_line[-1].keyword), + **self.exception_kwargs + ) - _coding_re = re.compile(r'#.*coding[:=]\s*([-\w.]+).*\r?\n') + _coding_re = re.compile(r"#.*coding[:=]\s*([-\w.]+).*\r?\n") def decode_raw_stream(self, text, decode_raw, known_encoding, filename): """given string/unicode or bytes/string, determine encoding @@ -184,44 +201,48 @@ class Lexer(object): """ if isinstance(text, compat.text_type): m = self._coding_re.match(text) - encoding = m and m.group(1) or known_encoding or 'ascii' + encoding = m and m.group(1) or known_encoding or "ascii" return encoding, text if text.startswith(codecs.BOM_UTF8): - text = text[len(codecs.BOM_UTF8):] - parsed_encoding = 'utf-8' - m = self._coding_re.match(text.decode('utf-8', 'ignore')) - if m is not None and m.group(1) != 'utf-8': + text = text[len(codecs.BOM_UTF8) :] + parsed_encoding = "utf-8" + m = self._coding_re.match(text.decode("utf-8", "ignore")) + if m is not None and m.group(1) != "utf-8": raise exceptions.CompileException( "Found utf-8 BOM in file, with conflicting " "magic encoding comment of '%s'" % m.group(1), - text.decode('utf-8', 'ignore'), - 0, 0, filename) + text.decode("utf-8", "ignore"), + 0, + 0, + filename, + ) else: - m = self._coding_re.match(text.decode('utf-8', 'ignore')) + m = self._coding_re.match(text.decode("utf-8", "ignore")) if m: parsed_encoding = m.group(1) else: - parsed_encoding = known_encoding or 'ascii' + parsed_encoding = known_encoding or "ascii" if decode_raw: try: text = text.decode(parsed_encoding) except UnicodeDecodeError: raise exceptions.CompileException( - "Unicode decode operation of encoding '%s' failed" % - parsed_encoding, - text.decode('utf-8', 'ignore'), - 0, 0, filename) + "Unicode decode operation of encoding '%s' failed" + % parsed_encoding, + text.decode("utf-8", "ignore"), + 0, + 0, + filename, + ) return parsed_encoding, text def parse(self): self.encoding, self.text = self.decode_raw_stream( - self.text, - not self.disable_unicode, - self.encoding, - self.filename) + self.text, not self.disable_unicode, self.encoding, self.filename + ) for preproc in self.preprocessor: self.text = preproc(self.text) @@ -232,7 +253,7 @@ class Lexer(object): self.textlength = len(self.text) - while (True): + while True: if 
self.match_position > self.textlength:
                 break
 
@@ -258,20 +279,24 @@ class Lexer(object):
             raise exceptions.CompileException("assertion failed")
 
         if len(self.tag):
-            raise exceptions.SyntaxException("Unclosed tag: <%%%s>" %
-                                             self.tag[-1].keyword,
-                                             **self.exception_kwargs)
+            raise exceptions.SyntaxException(
+                "Unclosed tag: <%%%s>" % self.tag[-1].keyword,
+                **self.exception_kwargs
+            )
         if len(self.control_line):
             raise exceptions.SyntaxException(
-                "Unterminated control keyword: '%s'" %
-                self.control_line[-1].keyword,
+                "Unterminated control keyword: '%s'"
+                % self.control_line[-1].keyword,
                 self.text,
                 self.control_line[-1].lineno,
-                self.control_line[-1].pos, self.filename)
+                self.control_line[-1].pos,
+                self.filename,
+            )
         return self.template
 
     def match_tag_start(self):
-        match = self.match(r'''
+        match = self.match(
+            r"""
             \<%     # opening tag
 
             ([\w\.\:]+)   # keyword
 
             ((?:\s+\w+|\s*=\s*|".*?"|'.*?')*)  # attrname, = \
                                                #        sign, string expression
 
             \s*     # more whitespace
 
             (/)?>   # closing
 
-            ''',
-
-            re.I | re.S | re.X)
+            """,
+            re.I | re.S | re.X,
+        )
 
         if match:
             keyword, attr, isend = match.groups()
             self.keyword = keyword
             attributes = {}
             if attr:
                 for att in re.findall(
-                        r"\s*(\w+)\s*=\s*(?:'([^']*)'|\"([^\"]*)\")", attr):
+                    r"\s*(\w+)\s*=\s*(?:'([^']*)'|\"([^\"]*)\")", attr
+                ):
                     key, val1, val2 = att
                     text = val1 or val2
-                    text = text.replace('\r\n', '\n')
+                    text = text.replace("\r\n", "\n")
                     attributes[key] = text
             self.append_node(parsetree.Tag, keyword, attributes)
             if isend:
                 self.tag.pop()
             else:
-                if keyword == 'text':
-                    match = self.match(r'(.*?)(?=\</%text>)', re.S)
+                if keyword == "text":
+                    match = self.match(r"(.*?)(?=\</%text>)", re.S)
                     if not match:
                         raise exceptions.SyntaxException(
-                            "Unclosed tag: <%%%s>" %
-                            self.tag[-1].keyword,
-                            **self.exception_kwargs)
+                            "Unclosed tag: <%%%s>" % self.tag[-1].keyword,
+                            **self.exception_kwargs
+                        )
                     self.append_node(parsetree.Text, match.group(1))
                     return self.match_tag_end()
             return True
         else:
             return False
 
     def match_tag_end(self):
-        match = self.match(r'\</%[\t ]*(.+?)[\t ]*>')
+        match = self.match(r"\</%[\t ]*(.+?)[\t ]*>")
         if match:
             if not len(self.tag):
                 raise exceptions.SyntaxException(
-                    "Closing tag without opening tag: </%%%s>" %
-                    match.group(1),
-                    **self.exception_kwargs)
+                    "Closing tag without opening tag: </%%%s>"
+                    % match.group(1),
+                    **self.exception_kwargs
+                )
             elif self.tag[-1].keyword != match.group(1):
                 raise exceptions.SyntaxException(
-                    "Closing tag </%%%s> does not match tag: <%%%s>" %
-                    (match.group(1), self.tag[-1].keyword),
-                    **self.exception_kwargs)
+                    "Closing tag </%%%s> does not match tag: <%%%s>"
+                    % (match.group(1), self.tag[-1].keyword),
+                    **self.exception_kwargs
+                )
             self.tag.pop()
             return True
         else:
             return False
 
     def match_end(self):
-        match = self.match(r'\Z', re.S)
+        match = self.match(r"\Z", re.S)
         if match:
             string = match.group()
             if string:
@@ -345,7 +373,8 @@ class Lexer(object):
             return False
 
     def match_text(self):
-        match = self.match(r"""
+        match = self.match(
+            r"""
                 (.*?) 
# anything, followed by: ( (?<=\n)(?=[ \t]*(?=%|\#\#)) # an eval or line-based @@ -360,7 +389,9 @@ class Lexer(object): (\\\r?\n) # an escaped newline - throw away | \Z # end of string - )""", re.X | re.S) + )""", + re.X | re.S, + ) if match: text = match.group(1) @@ -374,14 +405,17 @@ class Lexer(object): match = self.match(r"<%(!)?") if match: line, pos = self.matched_lineno, self.matched_charpos - text, end = self.parse_until_text(False, r'%>') + text, end = self.parse_until_text(False, r"%>") # the trailing newline helps # compiler.parse() not complain about indentation text = adjust_whitespace(text) + "\n" self.append_node( parsetree.Code, text, - match.group(1) == '!', lineno=line, pos=pos) + match.group(1) == "!", + lineno=line, + pos=pos, + ) return True else: return False @@ -390,16 +424,19 @@ class Lexer(object): match = self.match(r"\${") if match: line, pos = self.matched_lineno, self.matched_charpos - text, end = self.parse_until_text(True, r'\|', r'}') - if end == '|': - escapes, end = self.parse_until_text(True, r'}') + text, end = self.parse_until_text(True, r"\|", r"}") + if end == "|": + escapes, end = self.parse_until_text(True, r"}") else: escapes = "" - text = text.replace('\r\n', '\n') + text = text.replace("\r\n", "\n") self.append_node( parsetree.Expression, - text, escapes.strip(), - lineno=line, pos=pos) + text, + escapes.strip(), + lineno=line, + pos=pos, + ) return True else: return False @@ -407,31 +444,35 @@ class Lexer(object): def match_control_line(self): match = self.match( r"(?<=^)[\t ]*(%(?!%)|##)[\t ]*((?:(?:\\r?\n)|[^\r\n])*)" - r"(?:\r?\n|\Z)", re.M) + r"(?:\r?\n|\Z)", + re.M, + ) if match: operator = match.group(1) text = match.group(2) - if operator == '%': - m2 = re.match(r'(end)?(\w+)\s*(.*)', text) + if operator == "%": + m2 = re.match(r"(end)?(\w+)\s*(.*)", text) if not m2: raise exceptions.SyntaxException( - "Invalid control line: '%s'" % - text, - **self.exception_kwargs) + "Invalid control line: '%s'" % text, + **self.exception_kwargs + ) isend, keyword = m2.group(1, 2) - isend = (isend is not None) + isend = isend is not None if isend: if not len(self.control_line): raise exceptions.SyntaxException( - "No starting keyword '%s' for '%s'" % - (keyword, text), - **self.exception_kwargs) + "No starting keyword '%s' for '%s'" + % (keyword, text), + **self.exception_kwargs + ) elif self.control_line[-1].keyword != keyword: raise exceptions.SyntaxException( - "Keyword '%s' doesn't match keyword '%s'" % - (text, self.control_line[-1].keyword), - **self.exception_kwargs) + "Keyword '%s' doesn't match keyword '%s'" + % (text, self.control_line[-1].keyword), + **self.exception_kwargs + ) self.append_node(parsetree.ControlLine, keyword, isend, text) else: self.append_node(parsetree.Comment, text) diff --git a/mako/lookup.py b/mako/lookup.py index 0d3f304..a3010d5 100644 --- a/mako/lookup.py +++ b/mako/lookup.py @@ -5,10 +5,12 @@ # the MIT License: http://www.opensource.org/licenses/mit-license.php import os -import stat import posixpath import re -from mako import exceptions, util +import stat + +from mako import exceptions +from mako import util from mako.template import Template try: @@ -151,41 +153,41 @@ class TemplateLookup(TemplateCollection): """ - def __init__(self, - directories=None, - module_directory=None, - filesystem_checks=True, - collection_size=-1, - format_exceptions=False, - error_handler=None, - disable_unicode=False, - bytestring_passthrough=False, - output_encoding=None, - encoding_errors='strict', - - cache_args=None, - 
cache_impl='beaker', - cache_enabled=True, - cache_type=None, - cache_dir=None, - cache_url=None, - - modulename_callable=None, - module_writer=None, - default_filters=None, - buffer_filters=(), - strict_undefined=False, - imports=None, - future_imports=None, - enable_loop=True, - input_encoding=None, - preprocessor=None, - lexer_cls=None, - include_error_handler=None): - - self.directories = [posixpath.normpath(d) for d in - util.to_list(directories, ()) - ] + def __init__( + self, + directories=None, + module_directory=None, + filesystem_checks=True, + collection_size=-1, + format_exceptions=False, + error_handler=None, + disable_unicode=False, + bytestring_passthrough=False, + output_encoding=None, + encoding_errors="strict", + cache_args=None, + cache_impl="beaker", + cache_enabled=True, + cache_type=None, + cache_dir=None, + cache_url=None, + modulename_callable=None, + module_writer=None, + default_filters=None, + buffer_filters=(), + strict_undefined=False, + imports=None, + future_imports=None, + enable_loop=True, + input_encoding=None, + preprocessor=None, + lexer_cls=None, + include_error_handler=None, + ): + + self.directories = [ + posixpath.normpath(d) for d in util.to_list(directories, ()) + ] self.module_directory = module_directory self.modulename_callable = modulename_callable self.filesystem_checks = filesystem_checks @@ -195,34 +197,34 @@ class TemplateLookup(TemplateCollection): cache_args = {} # transfer deprecated cache_* args if cache_dir: - cache_args.setdefault('dir', cache_dir) + cache_args.setdefault("dir", cache_dir) if cache_url: - cache_args.setdefault('url', cache_url) + cache_args.setdefault("url", cache_url) if cache_type: - cache_args.setdefault('type', cache_type) + cache_args.setdefault("type", cache_type) self.template_args = { - 'format_exceptions': format_exceptions, - 'error_handler': error_handler, - 'include_error_handler': include_error_handler, - 'disable_unicode': disable_unicode, - 'bytestring_passthrough': bytestring_passthrough, - 'output_encoding': output_encoding, - 'cache_impl': cache_impl, - 'encoding_errors': encoding_errors, - 'input_encoding': input_encoding, - 'module_directory': module_directory, - 'module_writer': module_writer, - 'cache_args': cache_args, - 'cache_enabled': cache_enabled, - 'default_filters': default_filters, - 'buffer_filters': buffer_filters, - 'strict_undefined': strict_undefined, - 'imports': imports, - 'future_imports': future_imports, - 'enable_loop': enable_loop, - 'preprocessor': preprocessor, - 'lexer_cls': lexer_cls + "format_exceptions": format_exceptions, + "error_handler": error_handler, + "include_error_handler": include_error_handler, + "disable_unicode": disable_unicode, + "bytestring_passthrough": bytestring_passthrough, + "output_encoding": output_encoding, + "cache_impl": cache_impl, + "encoding_errors": encoding_errors, + "input_encoding": input_encoding, + "module_directory": module_directory, + "module_writer": module_writer, + "cache_args": cache_args, + "cache_enabled": cache_enabled, + "default_filters": default_filters, + "buffer_filters": buffer_filters, + "strict_undefined": strict_undefined, + "imports": imports, + "future_imports": future_imports, + "enable_loop": enable_loop, + "preprocessor": preprocessor, + "lexer_cls": lexer_cls, } if collection_size == -1: @@ -248,17 +250,18 @@ class TemplateLookup(TemplateCollection): else: return self._collection[uri] except KeyError: - u = re.sub(r'^\/+', '', uri) - for dir in self.directories: + u = re.sub(r"^\/+", "", uri) + for dir_ in 
self.directories: # make sure the path seperators are posix - os.altsep is empty # on POSIX and cannot be used. - dir = dir.replace(os.path.sep, posixpath.sep) - srcfile = posixpath.normpath(posixpath.join(dir, u)) + dir_ = dir_.replace(os.path.sep, posixpath.sep) + srcfile = posixpath.normpath(posixpath.join(dir_, u)) if os.path.isfile(srcfile): return self._load(srcfile, uri) else: raise exceptions.TopLevelLookupException( - "Cant locate template for uri %r" % uri) + "Cant locate template for uri %r" % uri + ) def adjust_uri(self, uri, relativeto): """Adjust the given ``uri`` based on the given relative URI.""" @@ -267,12 +270,13 @@ class TemplateLookup(TemplateCollection): if key in self._uri_cache: return self._uri_cache[key] - if uri[0] != '/': + if uri[0] != "/": if relativeto is not None: v = self._uri_cache[key] = posixpath.join( - posixpath.dirname(relativeto), uri) + posixpath.dirname(relativeto), uri + ) else: - v = self._uri_cache[key] = '/' + uri + v = self._uri_cache[key] = "/" + uri else: v = self._uri_cache[key] = uri return v @@ -295,9 +299,9 @@ class TemplateLookup(TemplateCollection): """ filename = posixpath.normpath(filename) - for dir in self.directories: - if filename[0:len(dir)] == dir: - return filename[len(dir):] + for dir_ in self.directories: + if filename[0 : len(dir_)] == dir_: + return filename[len(dir_) :] else: return None @@ -320,7 +324,8 @@ class TemplateLookup(TemplateCollection): filename=posixpath.normpath(filename), lookup=self, module_filename=module_filename, - **self.template_args) + **self.template_args + ) return template except: # if compilation fails etc, ensure @@ -337,8 +342,7 @@ class TemplateLookup(TemplateCollection): try: template_stat = os.stat(template.filename) - if template.module._modified_time < \ - template_stat[stat.ST_MTIME]: + if template.module._modified_time < template_stat[stat.ST_MTIME]: self._collection.pop(uri, None) return self._load(template.filename, uri) else: @@ -346,7 +350,8 @@ class TemplateLookup(TemplateCollection): except OSError: self._collection.pop(uri, None) raise exceptions.TemplateLookupException( - "Cant locate template for uri %r" % uri) + "Cant locate template for uri %r" % uri + ) def put_string(self, uri, text): """Place a new :class:`.Template` object into this @@ -355,10 +360,8 @@ class TemplateLookup(TemplateCollection): """ self._collection[uri] = Template( - text, - lookup=self, - uri=uri, - **self.template_args) + text, lookup=self, uri=uri, **self.template_args + ) def put_template(self, uri, template): """Place a new :class:`.Template` object into this diff --git a/mako/parsetree.py b/mako/parsetree.py index e129916..7f06667 100644 --- a/mako/parsetree.py +++ b/mako/parsetree.py @@ -6,9 +6,14 @@ """defines the parse tree components for Mako templates.""" -from mako import exceptions, ast, util, filters, compat import re +from mako import ast +from mako import compat +from mako import exceptions +from mako import filters +from mako import util + class Node(object): @@ -22,8 +27,12 @@ class Node(object): @property def exception_kwargs(self): - return {'source': self.source, 'lineno': self.lineno, - 'pos': self.pos, 'filename': self.filename} + return { + "source": self.source, + "lineno": self.lineno, + "pos": self.pos, + "filename": self.filename, + } def get_children(self): return [] @@ -42,7 +51,7 @@ class TemplateNode(Node): """a 'container' node that stores the overall collection of nodes.""" def __init__(self, filename): - super(TemplateNode, self).__init__('', 0, 0, filename) + 
super(TemplateNode, self).__init__("", 0, 0, filename) self.nodes = [] self.page_attributes = {} @@ -52,7 +61,8 @@ class TemplateNode(Node): def __repr__(self): return "TemplateNode(%s, %r)" % ( util.sorted_dict_repr(self.page_attributes), - self.nodes) + self.nodes, + ) class ControlLine(Node): @@ -74,7 +84,7 @@ class ControlLine(Node): self.text = text self.keyword = keyword self.isend = isend - self.is_primary = keyword in ['for', 'if', 'while', 'try', 'with'] + self.is_primary = keyword in ["for", "if", "while", "try", "with"] self.nodes = [] if self.isend: self._declared_identifiers = [] @@ -98,9 +108,9 @@ class ControlLine(Node): for this ControlLine""" return keyword in { - 'if': set(['else', 'elif']), - 'try': set(['except', 'finally']), - 'for': set(['else']) + "if": set(["else", "elif"]), + "try": set(["except", "finally"]), + "for": set(["else"]), }.get(self.keyword, []) def __repr__(self): @@ -108,7 +118,7 @@ class ControlLine(Node): self.keyword, self.text, self.isend, - (self.lineno, self.pos) + (self.lineno, self.pos), ) @@ -158,7 +168,7 @@ class Code(Node): return "Code(%r, %r, %r)" % ( self.text, self.ismodule, - (self.lineno, self.pos) + (self.lineno, self.pos), ) @@ -208,7 +218,7 @@ class Expression(Node): return "Expression(%r, %r, %r)" % ( self.text, self.escapes_code.args, - (self.lineno, self.pos) + (self.lineno, self.pos), ) @@ -219,45 +229,55 @@ class _TagMeta(type): _classmap = {} - def __init__(cls, clsname, bases, dict): - if getattr(cls, '__keyword__', None) is not None: + def __init__(cls, clsname, bases, dict_): + if getattr(cls, "__keyword__", None) is not None: cls._classmap[cls.__keyword__] = cls - super(_TagMeta, cls).__init__(clsname, bases, dict) + super(_TagMeta, cls).__init__(clsname, bases, dict_) def __call__(cls, keyword, attributes, **kwargs): if ":" in keyword: - ns, defname = keyword.split(':') - return type.__call__(CallNamespaceTag, ns, defname, - attributes, **kwargs) + ns, defname = keyword.split(":") + return type.__call__( + CallNamespaceTag, ns, defname, attributes, **kwargs + ) try: cls = _TagMeta._classmap[keyword] except KeyError: raise exceptions.CompileException( "No such tag: '%s'" % keyword, - source=kwargs['source'], - lineno=kwargs['lineno'], - pos=kwargs['pos'], - filename=kwargs['filename'] + source=kwargs["source"], + lineno=kwargs["lineno"], + pos=kwargs["pos"], + filename=kwargs["filename"], ) return type.__call__(cls, keyword, attributes, **kwargs) class Tag(compat.with_metaclass(_TagMeta, Node)): - """abstract base class for tags. - <%sometag/> + e.g.:: + + <%sometag/> - <%someothertag> - stuff - + <%someothertag> + stuff + """ + __keyword__ = None - def __init__(self, keyword, attributes, expressions, - nonexpressions, required, **kwargs): + def __init__( + self, + keyword, + attributes, + expressions, + nonexpressions, + required, + **kwargs + ): r"""construct a new Tag instance. 
this constructor not called directly, and is only called @@ -284,9 +304,10 @@ class Tag(compat.with_metaclass(_TagMeta, Node)): missing = [r for r in required if r not in self.parsed_attributes] if len(missing): raise exceptions.CompileException( - "Missing attribute(s): %s" % - ",".join([repr(m) for m in missing]), - **self.exception_kwargs) + "Missing attribute(s): %s" + % ",".join([repr(m) for m in missing]), + **self.exception_kwargs + ) self.parent = None self.nodes = [] @@ -302,36 +323,40 @@ class Tag(compat.with_metaclass(_TagMeta, Node)): for key in self.attributes: if key in expressions: expr = [] - for x in re.compile(r'(\${.+?})', - re.S).split(self.attributes[key]): - m = re.compile(r'^\${(.+?)}$', re.S).match(x) + for x in re.compile(r"(\${.+?})", re.S).split( + self.attributes[key] + ): + m = re.compile(r"^\${(.+?)}$", re.S).match(x) if m: - code = ast.PythonCode(m.group(1).rstrip(), - **self.exception_kwargs) + code = ast.PythonCode( + m.group(1).rstrip(), **self.exception_kwargs + ) # we aren't discarding "declared_identifiers" here, # which we do so that list comprehension-declared # variables aren't counted. As yet can't find a # condition that requires it here. - undeclared_identifiers = \ - undeclared_identifiers.union( - code.undeclared_identifiers) - expr.append('(%s)' % m.group(1)) + undeclared_identifiers = undeclared_identifiers.union( + code.undeclared_identifiers + ) + expr.append("(%s)" % m.group(1)) else: if x: expr.append(repr(x)) - self.parsed_attributes[key] = " + ".join(expr) or repr('') + self.parsed_attributes[key] = " + ".join(expr) or repr("") elif key in nonexpressions: - if re.search(r'\${.+?}', self.attributes[key]): + if re.search(r"\${.+?}", self.attributes[key]): raise exceptions.CompileException( "Attibute '%s' in tag '%s' does not allow embedded " "expressions" % (key, self.keyword), - **self.exception_kwargs) + **self.exception_kwargs + ) self.parsed_attributes[key] = repr(self.attributes[key]) else: raise exceptions.CompileException( - "Invalid attribute for tag '%s': '%s'" % - (self.keyword, key), - **self.exception_kwargs) + "Invalid attribute for tag '%s': '%s'" + % (self.keyword, key), + **self.exception_kwargs + ) self.expression_undeclared_identifiers = undeclared_identifiers def declared_identifiers(self): @@ -341,56 +366,64 @@ class Tag(compat.with_metaclass(_TagMeta, Node)): return self.expression_undeclared_identifiers def __repr__(self): - return "%s(%r, %s, %r, %r)" % (self.__class__.__name__, - self.keyword, - util.sorted_dict_repr(self.attributes), - (self.lineno, self.pos), - self.nodes - ) + return "%s(%r, %s, %r, %r)" % ( + self.__class__.__name__, + self.keyword, + util.sorted_dict_repr(self.attributes), + (self.lineno, self.pos), + self.nodes, + ) class IncludeTag(Tag): - __keyword__ = 'include' + __keyword__ = "include" def __init__(self, keyword, attributes, **kwargs): super(IncludeTag, self).__init__( keyword, attributes, - ('file', 'import', 'args'), - (), ('file',), **kwargs) + ("file", "import", "args"), + (), + ("file",), + **kwargs + ) self.page_args = ast.PythonCode( - "__DUMMY(%s)" % attributes.get('args', ''), - **self.exception_kwargs) + "__DUMMY(%s)" % attributes.get("args", ""), **self.exception_kwargs + ) def declared_identifiers(self): return [] def undeclared_identifiers(self): - identifiers = self.page_args.undeclared_identifiers.\ - difference(set(["__DUMMY"])).\ - difference(self.page_args.declared_identifiers) - return identifiers.union(super(IncludeTag, self). 
- undeclared_identifiers()) + identifiers = self.page_args.undeclared_identifiers.difference( + set(["__DUMMY"]) + ).difference(self.page_args.declared_identifiers) + return identifiers.union( + super(IncludeTag, self).undeclared_identifiers() + ) class NamespaceTag(Tag): - __keyword__ = 'namespace' + __keyword__ = "namespace" def __init__(self, keyword, attributes, **kwargs): super(NamespaceTag, self).__init__( - keyword, attributes, - ('file',), - ('name', 'inheritable', - 'import', 'module'), - (), **kwargs) - - self.name = attributes.get('name', '__anon_%s' % hex(abs(id(self)))) - if 'name' not in attributes and 'import' not in attributes: + keyword, + attributes, + ("file",), + ("name", "inheritable", "import", "module"), + (), + **kwargs + ) + + self.name = attributes.get("name", "__anon_%s" % hex(abs(id(self)))) + if "name" not in attributes and "import" not in attributes: raise exceptions.CompileException( "'name' and/or 'import' attributes are required " "for <%namespace>", - **self.exception_kwargs) - if 'file' in attributes and 'module' in attributes: + **self.exception_kwargs + ) + if "file" in attributes and "module" in attributes: raise exceptions.CompileException( "<%namespace> may only have one of 'file' or 'module'", **self.exception_kwargs @@ -401,51 +434,51 @@ class NamespaceTag(Tag): class TextTag(Tag): - __keyword__ = 'text' + __keyword__ = "text" def __init__(self, keyword, attributes, **kwargs): super(TextTag, self).__init__( - keyword, - attributes, (), - ('filter'), (), **kwargs) + keyword, attributes, (), ("filter"), (), **kwargs + ) self.filter_args = ast.ArgumentList( - attributes.get('filter', ''), - **self.exception_kwargs) + attributes.get("filter", ""), **self.exception_kwargs + ) def undeclared_identifiers(self): - return self.filter_args.\ - undeclared_identifiers.\ - difference(filters.DEFAULT_ESCAPES.keys()).union( - self.expression_undeclared_identifiers - ) + return self.filter_args.undeclared_identifiers.difference( + filters.DEFAULT_ESCAPES.keys() + ).union(self.expression_undeclared_identifiers) class DefTag(Tag): - __keyword__ = 'def' + __keyword__ = "def" def __init__(self, keyword, attributes, **kwargs): - expressions = ['buffered', 'cached'] + [ - c for c in attributes if c.startswith('cache_')] + expressions = ["buffered", "cached"] + [ + c for c in attributes if c.startswith("cache_") + ] super(DefTag, self).__init__( keyword, attributes, expressions, - ('name', 'filter', 'decorator'), - ('name',), - **kwargs) - name = attributes['name'] - if re.match(r'^[\w_]+$', name): + ("name", "filter", "decorator"), + ("name",), + **kwargs + ) + name = attributes["name"] + if re.match(r"^[\w_]+$", name): raise exceptions.CompileException( - "Missing parenthesis in %def", - **self.exception_kwargs) - self.function_decl = ast.FunctionDecl("def " + name + ":pass", - **self.exception_kwargs) + "Missing parenthesis in %def", **self.exception_kwargs + ) + self.function_decl = ast.FunctionDecl( + "def " + name + ":pass", **self.exception_kwargs + ) self.name = self.function_decl.funcname - self.decorator = attributes.get('decorator', '') + self.decorator = attributes.get("decorator", "") self.filter_args = ast.ArgumentList( - attributes.get('filter', ''), - **self.exception_kwargs) + attributes.get("filter", ""), **self.exception_kwargs + ) is_anonymous = False is_block = False @@ -463,51 +496,58 @@ class DefTag(Tag): def undeclared_identifiers(self): res = [] for c in self.function_decl.defaults: - res += list(ast.PythonCode(c, **self.exception_kwargs). 
- undeclared_identifiers) - return set(res).union( - self.filter_args. - undeclared_identifiers. - difference(filters.DEFAULT_ESCAPES.keys()) - ).union( - self.expression_undeclared_identifiers - ).difference( - self.function_decl.allargnames + res += list( + ast.PythonCode( + c, **self.exception_kwargs + ).undeclared_identifiers + ) + return ( + set(res) + .union( + self.filter_args.undeclared_identifiers.difference( + filters.DEFAULT_ESCAPES.keys() + ) + ) + .union(self.expression_undeclared_identifiers) + .difference(self.function_decl.allargnames) ) class BlockTag(Tag): - __keyword__ = 'block' + __keyword__ = "block" def __init__(self, keyword, attributes, **kwargs): - expressions = ['buffered', 'cached', 'args'] + [ - c for c in attributes if c.startswith('cache_')] + expressions = ["buffered", "cached", "args"] + [ + c for c in attributes if c.startswith("cache_") + ] super(BlockTag, self).__init__( keyword, attributes, expressions, - ('name', 'filter', 'decorator'), + ("name", "filter", "decorator"), (), - **kwargs) - name = attributes.get('name') - if name and not re.match(r'^[\w_]+$', name): + **kwargs + ) + name = attributes.get("name") + if name and not re.match(r"^[\w_]+$", name): raise exceptions.CompileException( "%block may not specify an argument signature", - **self.exception_kwargs) - if not name and attributes.get('args', None): - raise exceptions.CompileException( - "Only named %blocks may specify args", **self.exception_kwargs ) - self.body_decl = ast.FunctionArgs(attributes.get('args', ''), - **self.exception_kwargs) + if not name and attributes.get("args", None): + raise exceptions.CompileException( + "Only named %blocks may specify args", **self.exception_kwargs + ) + self.body_decl = ast.FunctionArgs( + attributes.get("args", ""), **self.exception_kwargs + ) self.name = name - self.decorator = attributes.get('decorator', '') + self.decorator = attributes.get("decorator", "") self.filter_args = ast.ArgumentList( - attributes.get('filter', ''), - **self.exception_kwargs) + attributes.get("filter", ""), **self.exception_kwargs + ) is_block = True @@ -517,7 +557,7 @@ class BlockTag(Tag): @property def funcname(self): - return self.name or "__M_anon_%d" % (self.lineno, ) + return self.name or "__M_anon_%d" % (self.lineno,) def get_argument_expressions(self, **kw): return self.body_decl.get_argument_expressions(**kw) @@ -526,91 +566,100 @@ class BlockTag(Tag): return self.body_decl.allargnames def undeclared_identifiers(self): - return (self.filter_args. - undeclared_identifiers. 
- difference(filters.DEFAULT_ESCAPES.keys()) - ).union(self.expression_undeclared_identifiers) + return ( + self.filter_args.undeclared_identifiers.difference( + filters.DEFAULT_ESCAPES.keys() + ) + ).union(self.expression_undeclared_identifiers) class CallTag(Tag): - __keyword__ = 'call' + __keyword__ = "call" def __init__(self, keyword, attributes, **kwargs): - super(CallTag, self).__init__(keyword, attributes, - ('args'), ('expr',), ('expr',), **kwargs) - self.expression = attributes['expr'] + super(CallTag, self).__init__( + keyword, attributes, ("args"), ("expr",), ("expr",), **kwargs + ) + self.expression = attributes["expr"] self.code = ast.PythonCode(self.expression, **self.exception_kwargs) - self.body_decl = ast.FunctionArgs(attributes.get('args', ''), - **self.exception_kwargs) + self.body_decl = ast.FunctionArgs( + attributes.get("args", ""), **self.exception_kwargs + ) def declared_identifiers(self): return self.code.declared_identifiers.union(self.body_decl.allargnames) def undeclared_identifiers(self): - return self.code.undeclared_identifiers.\ - difference(self.code.declared_identifiers) + return self.code.undeclared_identifiers.difference( + self.code.declared_identifiers + ) class CallNamespaceTag(Tag): - def __init__(self, namespace, defname, attributes, **kwargs): super(CallNamespaceTag, self).__init__( namespace + ":" + defname, attributes, - tuple(attributes.keys()) + ('args', ), + tuple(attributes.keys()) + ("args",), (), (), - **kwargs) + **kwargs + ) self.expression = "%s.%s(%s)" % ( namespace, defname, - ",".join(["%s=%s" % (k, v) for k, v in - self.parsed_attributes.items() - if k != 'args']) + ",".join( + [ + "%s=%s" % (k, v) + for k, v in self.parsed_attributes.items() + if k != "args" + ] + ), ) self.code = ast.PythonCode(self.expression, **self.exception_kwargs) self.body_decl = ast.FunctionArgs( - attributes.get('args', ''), - **self.exception_kwargs) + attributes.get("args", ""), **self.exception_kwargs + ) def declared_identifiers(self): return self.code.declared_identifiers.union(self.body_decl.allargnames) def undeclared_identifiers(self): - return self.code.undeclared_identifiers.\ - difference(self.code.declared_identifiers) + return self.code.undeclared_identifiers.difference( + self.code.declared_identifiers + ) class InheritTag(Tag): - __keyword__ = 'inherit' + __keyword__ = "inherit" def __init__(self, keyword, attributes, **kwargs): super(InheritTag, self).__init__( - keyword, attributes, - ('file',), (), ('file',), **kwargs) + keyword, attributes, ("file",), (), ("file",), **kwargs + ) class PageTag(Tag): - __keyword__ = 'page' + __keyword__ = "page" def __init__(self, keyword, attributes, **kwargs): - expressions = \ - ['cached', 'args', 'expression_filter', 'enable_loop'] + \ - [c for c in attributes if c.startswith('cache_')] + expressions = [ + "cached", + "args", + "expression_filter", + "enable_loop", + ] + [c for c in attributes if c.startswith("cache_")] super(PageTag, self).__init__( - keyword, - attributes, - expressions, - (), - (), - **kwargs) - self.body_decl = ast.FunctionArgs(attributes.get('args', ''), - **self.exception_kwargs) + keyword, attributes, expressions, (), (), **kwargs + ) + self.body_decl = ast.FunctionArgs( + attributes.get("args", ""), **self.exception_kwargs + ) self.filter_args = ast.ArgumentList( - attributes.get('expression_filter', ''), - **self.exception_kwargs) + attributes.get("expression_filter", ""), **self.exception_kwargs + ) def declared_identifiers(self): return self.body_decl.allargnames diff --git 
a/mako/pygen.py b/mako/pygen.py index 8514e02..70665f6 100644 --- a/mako/pygen.py +++ b/mako/pygen.py @@ -7,11 +7,11 @@ """utilities for generating and formatting literal Python code.""" import re + from mako import exceptions class PythonPrinter(object): - def __init__(self, stream): # indentation counter self.indent = 0 @@ -60,7 +60,7 @@ class PythonPrinter(object): The indentation of the total block of lines will be adjusted to that of the current indent level.""" self.in_indent_lines = False - for l in re.split(r'\r?\n', block): + for l in re.split(r"\r?\n", block): self.line_buffer.append(l) self._update_lineno(1) @@ -83,21 +83,18 @@ class PythonPrinter(object): self.in_indent_lines = True if ( - line is None or - re.match(r"^\s*#", line) or - re.match(r"^\s*$", line) + line is None + or re.match(r"^\s*#", line) + or re.match(r"^\s*$", line) ): hastext = False else: hastext = True - is_comment = line and len(line) and line[0] == '#' + is_comment = line and len(line) and line[0] == "#" # see if this line should decrease the indentation level - if ( - not is_comment and - (not hastext or self._is_unindentor(line)) - ): + if not is_comment and (not hastext or self._is_unindentor(line)): if self.indent > 0: self.indent -= 1 @@ -106,7 +103,8 @@ class PythonPrinter(object): # module wont compile. if len(self.indent_detail) == 0: raise exceptions.SyntaxException( - "Too many whitespace closures") + "Too many whitespace closures" + ) self.indent_detail.pop() if line is None: @@ -136,8 +134,9 @@ class PythonPrinter(object): # its not a "compound" keyword. but lets also # test for valid Python keywords that might be indenting us, # else assume its a non-indenting line - m2 = re.match(r"^\s*(def|class|else|elif|except|finally)", - line) + m2 = re.match( + r"^\s*(def|class|else|elif|except|finally)", line + ) if m2: self.indent += 1 self.indent_detail.append(indentor) @@ -189,14 +188,15 @@ class PythonPrinter(object): # return False - def _indent_line(self, line, stripspace=''): + def _indent_line(self, line, stripspace=""): """indent the given line according to the current indent level. 
stripspace is a string of space that will be truncated from the start of the line before indenting.""" - return re.sub(r"^%s" % stripspace, self.indentstring - * self.indent, line) + return re.sub( + r"^%s" % stripspace, self.indentstring * self.indent, line + ) def _reset_multi_line_flags(self): """reset the flags which would indicate we are in a backslashed @@ -214,7 +214,7 @@ class PythonPrinter(object): # a literal multiline string with unfortunately placed # whitespace - current_state = (self.backslashed or self.triplequoted) + current_state = self.backslashed or self.triplequoted if re.search(r"\\$", line): self.backslashed = True @@ -251,7 +251,7 @@ def adjust_whitespace(text): (backslashed, triplequoted) = (0, 1) def in_multi_line(line): - start_state = (state[backslashed] or state[triplequoted]) + start_state = state[backslashed] or state[triplequoted] if re.search(r"\\$", line): state[backslashed] = True @@ -261,7 +261,7 @@ def adjust_whitespace(text): def match(reg, t): m = re.match(reg, t) if m: - return m, t[len(m.group(0)):] + return m, t[len(m.group(0)) :] else: return None, t @@ -273,7 +273,7 @@ def adjust_whitespace(text): else: m, line = match(r".*?(?=%s|$)" % state[triplequoted], line) else: - m, line = match(r'#', line) + m, line = match(r"#", line) if m: return start_state @@ -286,13 +286,13 @@ def adjust_whitespace(text): return start_state - def _indent_line(line, stripspace=''): - return re.sub(r"^%s" % stripspace, '', line) + def _indent_line(line, stripspace=""): + return re.sub(r"^%s" % stripspace, "", line) lines = [] stripspace = None - for line in re.split(r'\r?\n', text): + for line in re.split(r"\r?\n", text): if in_multi_line(line): lines.append(line) else: diff --git a/mako/pyparser.py b/mako/pyparser.py index 15d0da6..c171897 100644 --- a/mako/pyparser.py +++ b/mako/pyparser.py @@ -10,46 +10,52 @@ Parsing to AST is done via _ast on Python > 2.5, otherwise the compiler module is used. 
""" -from mako import exceptions, util, compat -from mako.compat import arg_stringname import operator +import _ast + +from mako import _ast_util +from mako import compat +from mako import exceptions +from mako import util +from mako.compat import arg_stringname + if compat.py3k: # words that cannot be assigned to (notably # smaller than the total keys in __builtins__) - reserved = set(['True', 'False', 'None', 'print']) + reserved = set(["True", "False", "None", "print"]) # the "id" attribute on a function node - arg_id = operator.attrgetter('arg') + arg_id = operator.attrgetter("arg") else: # words that cannot be assigned to (notably # smaller than the total keys in __builtins__) - reserved = set(['True', 'False', 'None']) + reserved = set(["True", "False", "None"]) # the "id" attribute on a function node - arg_id = operator.attrgetter('id') + arg_id = operator.attrgetter("id") -import _ast util.restore__ast(_ast) -from mako import _ast_util -def parse(code, mode='exec', **exception_kwargs): +def parse(code, mode="exec", **exception_kwargs): """Parse an expression into AST""" try: - return _ast_util.parse(code, '', mode) + return _ast_util.parse(code, "", mode) except Exception: raise exceptions.SyntaxException( - "(%s) %s (%r)" % ( + "(%s) %s (%r)" + % ( compat.exception_as().__class__.__name__, compat.exception_as(), - code[0:50] - ), **exception_kwargs) + code[0:50], + ), + **exception_kwargs + ) class FindIdentifiers(_ast_util.NodeVisitor): - def __init__(self, listener, **exception_kwargs): self.in_function = False self.in_assign_targets = False @@ -119,9 +125,9 @@ class FindIdentifiers(_ast_util.NodeVisitor): self.in_function = True local_ident_stack = self.local_ident_stack - self.local_ident_stack = local_ident_stack.union([ - arg_id(arg) for arg in self._expand_tuples(node.args.args) - ]) + self.local_ident_stack = local_ident_stack.union( + [arg_id(arg) for arg in self._expand_tuples(node.args.args)] + ) if islambda: self.visit(node.body) else: @@ -146,9 +152,11 @@ class FindIdentifiers(_ast_util.NodeVisitor): # this is eqiuvalent to visit_AssName in # compiler self._add_declared(node.id) - elif node.id not in reserved and node.id \ - not in self.listener.declared_identifiers and node.id \ - not in self.local_ident_stack: + elif ( + node.id not in reserved + and node.id not in self.listener.declared_identifiers + and node.id not in self.local_ident_stack + ): self.listener.undeclared_identifiers.add(node.id) def visit_Import(self, node): @@ -156,24 +164,25 @@ class FindIdentifiers(_ast_util.NodeVisitor): if name.asname is not None: self._add_declared(name.asname) else: - self._add_declared(name.name.split('.')[0]) + self._add_declared(name.name.split(".")[0]) def visit_ImportFrom(self, node): for name in node.names: if name.asname is not None: self._add_declared(name.asname) else: - if name.name == '*': + if name.name == "*": raise exceptions.CompileException( "'import *' is not supported, since all identifier " "names must be explicitly declared. Please use the " "form 'from import , , " - "...' instead.", **self.exception_kwargs) + "...' 
instead.", + **self.exception_kwargs + ) self._add_declared(name.name) class FindTuple(_ast_util.NodeVisitor): - def __init__(self, listener, code_factory, **exception_kwargs): self.listener = listener self.exception_kwargs = exception_kwargs @@ -184,16 +193,17 @@ class FindTuple(_ast_util.NodeVisitor): p = self.code_factory(n, **self.exception_kwargs) self.listener.codeargs.append(p) self.listener.args.append(ExpressionGenerator(n).value()) - self.listener.declared_identifiers = \ - self.listener.declared_identifiers.union( - p.declared_identifiers) - self.listener.undeclared_identifiers = \ - self.listener.undeclared_identifiers.union( - p.undeclared_identifiers) + ldi = self.listener.declared_identifiers + self.listener.declared_identifiers = ldi.union( + p.declared_identifiers + ) + lui = self.listener.undeclared_identifiers + self.listener.undeclared_identifiers = lui.union( + p.undeclared_identifiers + ) class ParseFunc(_ast_util.NodeVisitor): - def __init__(self, listener, **exception_kwargs): self.listener = listener self.exception_kwargs = exception_kwargs @@ -224,10 +234,9 @@ class ParseFunc(_ast_util.NodeVisitor): class ExpressionGenerator(object): - def __init__(self, astnode): - self.generator = _ast_util.SourceGenerator(' ' * 4) + self.generator = _ast_util.SourceGenerator(" " * 4) self.generator.visit(astnode) def value(self): - return ''.join(self.generator.result) + return "".join(self.generator.result) diff --git a/mako/runtime.py b/mako/runtime.py index 769541c..8c4f78c 100644 --- a/mako/runtime.py +++ b/mako/runtime.py @@ -7,10 +7,13 @@ """provides runtime services for templates, including Context, Namespace, and various helper functions.""" -from mako import exceptions, util, compat -from mako.compat import compat_builtins import sys +from mako import compat +from mako import exceptions +from mako import util +from mako.compat import compat_builtins + class Context(object): @@ -34,18 +37,19 @@ class Context(object): # "capture" function which proxies to the # generic "capture" function - self._data['capture'] = compat.partial(capture, self) + self._data["capture"] = compat.partial(capture, self) # "caller" stack used by def calls with content - self.caller_stack = self._data['caller'] = CallerStack() + self.caller_stack = self._data["caller"] = CallerStack() def _set_with_template(self, t): self._with_template = t illegal_names = t.reserved_names.intersection(self._data) if illegal_names: raise exceptions.NameConflictError( - "Reserved words passed to render(): %s" % - ", ".join(illegal_names)) + "Reserved words passed to render(): %s" + % ", ".join(illegal_names) + ) @property def lookup(self): @@ -177,14 +181,13 @@ class Context(object): c = self._copy() x = c._data - x.pop('self', None) - x.pop('parent', None) - x.pop('next', None) + x.pop("self", None) + x.pop("parent", None) + x.pop("next", None) return c class CallerStack(list): - def __init__(self): self.nextcaller = None @@ -231,6 +234,7 @@ class Undefined(object): def __bool__(self): return False + UNDEFINED = Undefined() STOP_RENDERING = "" @@ -342,7 +346,6 @@ class LoopContext(object): class _NSAttr(object): - def __init__(self, parent): self.__parent = parent @@ -373,9 +376,15 @@ class Namespace(object): """ - def __init__(self, name, context, - callables=None, inherits=None, - populate_self=True, calling_uri=None): + def __init__( + self, + name, + context, + callables=None, + inherits=None, + populate_self=True, + calling_uri=None, + ): self.name = name self.context = context self.inherits = inherits @@ 
-473,9 +482,12 @@ class Namespace(object): if key in self.context.namespaces: return self.context.namespaces[key] else: - ns = TemplateNamespace(uri, self.context._copy(), - templateuri=uri, - calling_uri=self._templateuri) + ns = TemplateNamespace( + uri, + self.context._copy(), + templateuri=uri, + calling_uri=self._templateuri, + ) self.context.namespaces[key] = ns return ns @@ -518,7 +530,7 @@ class Namespace(object): def _populate(self, d, l): for ident in l: - if ident == '*': + if ident == "*": for (k, v) in self._get_star(): d[k] = v else: @@ -536,8 +548,8 @@ class Namespace(object): val = getattr(self.inherits, key) else: raise AttributeError( - "Namespace '%s' has no member '%s'" % - (self.name, key)) + "Namespace '%s' has no member '%s'" % (self.name, key) + ) setattr(self, key, val) return val @@ -546,9 +558,17 @@ class TemplateNamespace(Namespace): """A :class:`.Namespace` specific to a :class:`.Template` instance.""" - def __init__(self, name, context, template=None, templateuri=None, - callables=None, inherits=None, - populate_self=True, calling_uri=None): + def __init__( + self, + name, + context, + template=None, + templateuri=None, + callables=None, + inherits=None, + populate_self=True, + calling_uri=None, + ): self.name = name self.context = context self.inherits = inherits @@ -556,8 +576,7 @@ class TemplateNamespace(Namespace): self.callables = dict([(c.__name__, c) for c in callables]) if templateuri is not None: - self.template = _lookup_template(context, templateuri, - calling_uri) + self.template = _lookup_template(context, templateuri, calling_uri) self._templateuri = self.template.module._template_uri elif template is not None: self.template = template @@ -566,9 +585,9 @@ class TemplateNamespace(Namespace): raise TypeError("'template' argument is required.") if populate_self: - lclcallable, lclcontext = \ - _populate_self_namespace(context, self.template, - self_ns=self) + lclcallable, lclcontext = _populate_self_namespace( + context, self.template, self_ns=self + ) @property def module(self): @@ -607,6 +626,7 @@ class TemplateNamespace(Namespace): def get(key): callable_ = self.template._get_def_callable(key) return compat.partial(callable_, self.context) + for k in self.template.module._exports: yield (k, get(k)) @@ -621,8 +641,8 @@ class TemplateNamespace(Namespace): else: raise AttributeError( - "Namespace '%s' has no member '%s'" % - (self.name, key)) + "Namespace '%s' has no member '%s'" % (self.name, key) + ) setattr(self, key, val) return val @@ -631,9 +651,16 @@ class ModuleNamespace(Namespace): """A :class:`.Namespace` specific to a Python module instance.""" - def __init__(self, name, context, module, - callables=None, inherits=None, - populate_self=True, calling_uri=None): + def __init__( + self, + name, + context, + module, + callables=None, + inherits=None, + populate_self=True, + calling_uri=None, + ): self.name = name self.context = context self.inherits = inherits @@ -641,7 +668,7 @@ class ModuleNamespace(Namespace): self.callables = dict([(c.__name__, c) for c in callables]) mod = __import__(module) - for token in module.split('.')[1:]: + for token in module.split(".")[1:]: mod = getattr(mod, token) self.module = mod @@ -657,7 +684,7 @@ class ModuleNamespace(Namespace): for key in self.callables: yield (key, self.callables[key]) for key in dir(self.module): - if key[0] != '_': + if key[0] != "_": callable_ = getattr(self.module, key) if compat.callable(callable_): yield key, compat.partial(callable_, self.context) @@ -672,8 +699,8 @@ class 
ModuleNamespace(Namespace): val = getattr(self.inherits, key) else: raise AttributeError( - "Namespace '%s' has no member '%s'" % - (self.name, key)) + "Namespace '%s' has no member '%s'" % (self.name, key) + ) setattr(self, key, val) return val @@ -692,6 +719,7 @@ def supports_caller(func): return func(context, *args, **kwargs) finally: context.caller_stack._pop_frame() + return wrap_stackframe @@ -721,13 +749,16 @@ def _decorate_toplevel(fn): def go(context, *args, **kw): def y(*args, **kw): return render_fn(context, *args, **kw) + try: y.__name__ = render_fn.__name__[7:] except TypeError: # < Python 2.4 pass return fn(y)(context, *args, **kw) + return go + return decorate_render @@ -737,7 +768,9 @@ def _decorate_inline(context, fn): def go(*args, **kw): return dec(context, *args, **kw) + return go + return decorate_render @@ -747,8 +780,8 @@ def _include_file(context, uri, calling_uri, **kwargs): template = _lookup_template(context, uri, calling_uri) (callable_, ctx) = _populate_self_namespace( - context._clean_inheritance_tokens(), - template) + context._clean_inheritance_tokens(), template + ) kwargs = _kwargs_for_include(callable_, context._data, **kwargs) if template.include_error_handler: try: @@ -769,23 +802,25 @@ def _inherit_from(context, uri, calling_uri): if uri is None: return None template = _lookup_template(context, uri, calling_uri) - self_ns = context['self'] + self_ns = context["self"] ih = self_ns while ih.inherits is not None: ih = ih.inherits - lclcontext = context._locals({'next': ih}) - ih.inherits = TemplateNamespace("self:%s" % template.uri, - lclcontext, - template=template, - populate_self=False) - context._data['parent'] = lclcontext._data['local'] = ih.inherits - callable_ = getattr(template.module, '_mako_inherit', None) + lclcontext = context._locals({"next": ih}) + ih.inherits = TemplateNamespace( + "self:%s" % template.uri, + lclcontext, + template=template, + populate_self=False, + ) + context._data["parent"] = lclcontext._data["local"] = ih.inherits + callable_ = getattr(template.module, "_mako_inherit", None) if callable_ is not None: ret = callable_(template, lclcontext) if ret: return ret - gen_ns = getattr(template.module, '_mako_generate_namespaces', None) + gen_ns = getattr(template.module, "_mako_generate_namespaces", None) if gen_ns is not None: gen_ns(context) return (template.callable_, lclcontext) @@ -795,8 +830,9 @@ def _lookup_template(context, uri, relativeto): lookup = context._with_template.lookup if lookup is None: raise exceptions.TemplateLookupException( - "Template '%s' has no TemplateLookup associated" % - context._with_template.uri) + "Template '%s' has no TemplateLookup associated" + % context._with_template.uri + ) uri = lookup.adjust_uri(uri, relativeto) try: return lookup.get_template(uri) @@ -806,11 +842,14 @@ def _lookup_template(context, uri, relativeto): def _populate_self_namespace(context, template, self_ns=None): if self_ns is None: - self_ns = TemplateNamespace('self:%s' % template.uri, - context, template=template, - populate_self=False) - context._data['self'] = context._data['local'] = self_ns - if hasattr(template.module, '_mako_inherit'): + self_ns = TemplateNamespace( + "self:%s" % template.uri, + context, + template=template, + populate_self=False, + ) + context._data["self"] = context._data["local"] = self_ns + if hasattr(template.module, "_mako_inherit"): ret = template.module._mako_inherit(template, context) if ret: return ret @@ -829,13 +868,19 @@ def _render(template, callable_, args, data, as_unicode=False): 
buf = util.FastEncodingBuffer( as_unicode=as_unicode, encoding=template.output_encoding, - errors=template.encoding_errors) + errors=template.encoding_errors, + ) context = Context(buf, **data) context._outputting_as_unicode = as_unicode context._set_with_template(template) - _render_context(template, callable_, context, *args, - **_kwargs_for_callable(callable_, data)) + _render_context( + template, + callable_, + context, + *args, + **_kwargs_for_callable(callable_, data) + ) return context._pop_buffer().getvalue() @@ -849,7 +894,7 @@ def _kwargs_for_callable(callable_, data): namedargs = argspec[0] + [v for v in argspec[1:3] if v is not None] kwargs = {} for arg in namedargs: - if arg != 'context' and arg in data and arg not in kwargs: + if arg != "context" and arg in data and arg not in kwargs: kwargs[arg] = data[arg] return kwargs @@ -858,13 +903,14 @@ def _kwargs_for_include(callable_, data, **kwargs): argspec = compat.inspect_func_args(callable_) namedargs = argspec[0] + [v for v in argspec[1:3] if v is not None] for arg in namedargs: - if arg != 'context' and arg in data and arg not in kwargs: + if arg != "context" and arg in data and arg not in kwargs: kwargs[arg] = data[arg] return kwargs def _render_context(tmpl, callable_, context, *args, **kwargs): import mako.template as template + # create polymorphic 'self' namespace for this # template with possibly updated context if not isinstance(tmpl, template.DefTemplate): @@ -886,8 +932,9 @@ def _exec_template(callable_, context, args=None, kwargs=None): be interpreted here. """ template = context._with_template - if template is not None and \ - (template.format_exceptions or template.error_handler): + if template is not None and ( + template.format_exceptions or template.error_handler + ): try: callable_(context, *args, **kwargs) except Exception: @@ -908,11 +955,15 @@ def _render_error(template, context, error): error_template = exceptions.html_error_template() if context._outputting_as_unicode: context._buffer_stack[:] = [ - util.FastEncodingBuffer(as_unicode=True)] + util.FastEncodingBuffer(as_unicode=True) + ] else: - context._buffer_stack[:] = [util.FastEncodingBuffer( - error_template.output_encoding, - error_template.encoding_errors)] + context._buffer_stack[:] = [ + util.FastEncodingBuffer( + error_template.output_encoding, + error_template.encoding_errors, + ) + ] context._set_with_template(error_template) error_template.render_context(context, error=error) diff --git a/mako/template.py b/mako/template.py index 329632c..89d2105 100644 --- a/mako/template.py +++ b/mako/template.py @@ -7,8 +7,6 @@ """Provides the Template class, a facade for parsing, generating and executing template strings, as well as template runtime operations.""" -from mako.lexer import Lexer -from mako import runtime, util, exceptions, codegen, cache, compat import os import re import shutil @@ -18,6 +16,14 @@ import tempfile import types import weakref +from mako import cache +from mako import codegen +from mako import compat +from mako import exceptions +from mako import runtime +from mako import util +from mako.lexer import Lexer + class Template(object): @@ -230,41 +236,43 @@ class Template(object): lexer_cls = Lexer - def __init__(self, - text=None, - filename=None, - uri=None, - format_exceptions=False, - error_handler=None, - lookup=None, - output_encoding=None, - encoding_errors='strict', - module_directory=None, - cache_args=None, - cache_impl='beaker', - cache_enabled=True, - cache_type=None, - cache_dir=None, - cache_url=None, - 
module_filename=None, - input_encoding=None, - disable_unicode=False, - module_writer=None, - bytestring_passthrough=False, - default_filters=None, - buffer_filters=(), - strict_undefined=False, - imports=None, - future_imports=None, - enable_loop=True, - preprocessor=None, - lexer_cls=None, - include_error_handler=None): + def __init__( + self, + text=None, + filename=None, + uri=None, + format_exceptions=False, + error_handler=None, + lookup=None, + output_encoding=None, + encoding_errors="strict", + module_directory=None, + cache_args=None, + cache_impl="beaker", + cache_enabled=True, + cache_type=None, + cache_dir=None, + cache_url=None, + module_filename=None, + input_encoding=None, + disable_unicode=False, + module_writer=None, + bytestring_passthrough=False, + default_filters=None, + buffer_filters=(), + strict_undefined=False, + imports=None, + future_imports=None, + enable_loop=True, + preprocessor=None, + lexer_cls=None, + include_error_handler=None, + ): if uri: - self.module_id = re.sub(r'\W', "_", uri) + self.module_id = re.sub(r"\W", "_", uri) self.uri = uri elif filename: - self.module_id = re.sub(r'\W', "_", filename) + self.module_id = re.sub(r"\W", "_", filename) drive, path = os.path.splitdrive(filename) path = os.path.normpath(path).replace(os.path.sep, "/") self.uri = path @@ -278,9 +286,10 @@ class Template(object): u_norm = os.path.normpath(u_norm) if u_norm.startswith(".."): raise exceptions.TemplateLookupException( - "Template uri \"%s\" is invalid - " + 'Template uri "%s" is invalid - ' "it cannot be relative outside " - "of the root path." % self.uri) + "of the root path." % self.uri + ) self.input_encoding = input_encoding self.output_encoding = output_encoding @@ -293,17 +302,18 @@ class Template(object): if compat.py3k and disable_unicode: raise exceptions.UnsupportedError( - "Mako for Python 3 does not " - "support disabling Unicode") + "Mako for Python 3 does not " "support disabling Unicode" + ) elif output_encoding and disable_unicode: raise exceptions.UnsupportedError( "output_encoding must be set to " - "None when disable_unicode is used.") + "None when disable_unicode is used." 
+ ) if default_filters is None: if compat.py3k or self.disable_unicode: - self.default_filters = ['str'] + self.default_filters = ["str"] else: - self.default_filters = ['unicode'] + self.default_filters = ["unicode"] else: self.default_filters = default_filters self.buffer_filters = buffer_filters @@ -329,8 +339,7 @@ class Template(object): elif module_directory is not None: path = os.path.abspath( os.path.join( - os.path.normpath(module_directory), - u_norm + ".py" + os.path.normpath(module_directory), u_norm + ".py" ) ) else: @@ -338,7 +347,8 @@ class Template(object): module = self._compile_from_file(path, filename) else: raise exceptions.RuntimeException( - "Template requires text or filename") + "Template requires text or filename" + ) self.module = module self.filename = filename @@ -351,8 +361,12 @@ class Template(object): self.module_directory = module_directory self._setup_cache_args( - cache_impl, cache_enabled, cache_args, - cache_type, cache_dir, cache_url + cache_impl, + cache_enabled, + cache_args, + cache_type, + cache_dir, + cache_url, ) @util.memoized_property @@ -360,11 +374,17 @@ class Template(object): if self.enable_loop: return codegen.RESERVED_NAMES else: - return codegen.RESERVED_NAMES.difference(['loop']) - - def _setup_cache_args(self, - cache_impl, cache_enabled, cache_args, - cache_type, cache_dir, cache_url): + return codegen.RESERVED_NAMES.difference(["loop"]) + + def _setup_cache_args( + self, + cache_impl, + cache_enabled, + cache_args, + cache_type, + cache_dir, + cache_url, + ): self.cache_impl = cache_impl self.cache_enabled = cache_enabled if cache_args: @@ -374,35 +394,31 @@ class Template(object): # transfer deprecated cache_* args if cache_type: - self.cache_args['type'] = cache_type + self.cache_args["type"] = cache_type if cache_dir: - self.cache_args['dir'] = cache_dir + self.cache_args["dir"] = cache_dir if cache_url: - self.cache_args['url'] = cache_url + self.cache_args["url"] = cache_url def _compile_from_file(self, path, filename): if path is not None: util.verify_directory(os.path.dirname(path)) filemtime = os.stat(filename)[stat.ST_MTIME] - if not os.path.exists(path) or \ - os.stat(path)[stat.ST_MTIME] < filemtime: + if ( + not os.path.exists(path) + or os.stat(path)[stat.ST_MTIME] < filemtime + ): data = util.read_file(filename) _compile_module_file( - self, - data, - filename, - path, - self.module_writer) + self, data, filename, path, self.module_writer + ) module = compat.load_module(self.module_id, path) del sys.modules[self.module_id] if module._magic_number != codegen.MAGIC_NUMBER: data = util.read_file(filename) _compile_module_file( - self, - data, - filename, - path, - self.module_writer) + self, data, filename, path, self.module_writer + ) module = compat.load_module(self.module_id, path) del sys.modules[self.module_id] ModuleInfo(module, path, self, filename, None, None) @@ -410,10 +426,7 @@ class Template(object): # template filename and no module directory, compile code # in memory data = util.read_file(filename) - code, module = _compile_text( - self, - data, - filename) + code, module = _compile_text(self, data, filename) self._source = None self._code = code ModuleInfo(module, None, self, filename, code, None) @@ -437,15 +450,15 @@ class Template(object): @property def cache_dir(self): - return self.cache_args['dir'] + return self.cache_args["dir"] @property def cache_url(self): - return self.cache_args['url'] + return self.cache_args["url"] @property def cache_type(self): - return self.cache_args['type'] + return 
self.cache_args["type"] def render(self, *args, **data): """Render the output of this template as a string. @@ -464,11 +477,9 @@ class Template(object): def render_unicode(self, *args, **data): """Render the output of this template as a unicode object.""" - return runtime._render(self, - self.callable_, - args, - data, - as_unicode=True) + return runtime._render( + self, self.callable_, args, data, as_unicode=True + ) def render_context(self, context, *args, **kwargs): """Render this :class:`.Template` with the given context. @@ -476,13 +487,9 @@ class Template(object): The data is written to the context's buffer. """ - if getattr(context, '_with_template', None) is None: + if getattr(context, "_with_template", None) is None: context._set_with_template(self) - runtime._render_context(self, - self.callable_, - context, - *args, - **kwargs) + runtime._render_context(self, self.callable_, context, *args, **kwargs) def has_def(self, name): return hasattr(self.module, "render_%s" % name) @@ -498,7 +505,7 @@ class Template(object): .. versionadded:: 1.0.4 """ - return [i[7:] for i in dir(self.module) if i[:7] == 'render_'] + return [i[7:] for i in dir(self.module) if i[:7] == "render_"] def _get_def_callable(self, name): return getattr(self.module, "render_%s" % name) @@ -526,28 +533,30 @@ class ModuleTemplate(Template): """ - def __init__(self, module, - module_filename=None, - template=None, - template_filename=None, - module_source=None, - template_source=None, - output_encoding=None, - encoding_errors='strict', - disable_unicode=False, - bytestring_passthrough=False, - format_exceptions=False, - error_handler=None, - lookup=None, - cache_args=None, - cache_impl='beaker', - cache_enabled=True, - cache_type=None, - cache_dir=None, - cache_url=None, - include_error_handler=None, - ): - self.module_id = re.sub(r'\W', "_", module._template_uri) + def __init__( + self, + module, + module_filename=None, + template=None, + template_filename=None, + module_source=None, + template_source=None, + output_encoding=None, + encoding_errors="strict", + disable_unicode=False, + bytestring_passthrough=False, + format_exceptions=False, + error_handler=None, + lookup=None, + cache_args=None, + cache_impl="beaker", + cache_enabled=True, + cache_type=None, + cache_dir=None, + cache_url=None, + include_error_handler=None, + ): + self.module_id = re.sub(r"\W", "_", module._template_uri) self.uri = module._template_uri self.input_encoding = module._source_encoding self.output_encoding = output_encoding @@ -558,21 +567,24 @@ class ModuleTemplate(Template): if compat.py3k and disable_unicode: raise exceptions.UnsupportedError( - "Mako for Python 3 does not " - "support disabling Unicode") + "Mako for Python 3 does not " "support disabling Unicode" + ) elif output_encoding and disable_unicode: raise exceptions.UnsupportedError( "output_encoding must be set to " - "None when disable_unicode is used.") + "None when disable_unicode is used." 
+ ) self.module = module self.filename = template_filename - ModuleInfo(module, - module_filename, - self, - template_filename, - module_source, - template_source) + ModuleInfo( + module, + module_filename, + self, + template_filename, + module_source, + template_source, + ) self.callable_ = self.module.render_body self.format_exceptions = format_exceptions @@ -580,8 +592,12 @@ class ModuleTemplate(Template): self.include_error_handler = include_error_handler self.lookup = lookup self._setup_cache_args( - cache_impl, cache_enabled, cache_args, - cache_type, cache_dir, cache_url + cache_impl, + cache_enabled, + cache_args, + cache_type, + cache_dir, + cache_url, ) @@ -614,15 +630,18 @@ class ModuleInfo(object): source code based on a module's identifier. """ + _modules = weakref.WeakValueDictionary() - def __init__(self, - module, - module_filename, - template, - template_filename, - module_source, - template_source): + def __init__( + self, + module, + module_filename, + template, + template_filename, + module_source, + template_source, + ): self.module = module self.module_filename = module_filename self.template_filename = template_filename @@ -635,15 +654,15 @@ class ModuleInfo(object): @classmethod def get_module_source_metadata(cls, module_source, full_line_map=False): source_map = re.search( - r"__M_BEGIN_METADATA(.+?)__M_END_METADATA", - module_source, re.S).group(1) + r"__M_BEGIN_METADATA(.+?)__M_END_METADATA", module_source, re.S + ).group(1) source_map = compat.json.loads(source_map) - source_map['line_map'] = dict( - (int(k), int(v)) - for k, v in source_map['line_map'].items()) + source_map["line_map"] = dict( + (int(k), int(v)) for k, v in source_map["line_map"].items() + ) if full_line_map: - f_line_map = source_map['full_line_map'] = [] - line_map = source_map['line_map'] + f_line_map = source_map["full_line_map"] = [] + line_map = source_map["line_map"] curr_templ_line = 1 for mod_line in range(1, max(line_map)): @@ -662,10 +681,12 @@ class ModuleInfo(object): @property def source(self): if self.template_source is not None: - if self.module._source_encoding and \ - not isinstance(self.template_source, compat.text_type): + if self.module._source_encoding and not isinstance( + self.template_source, compat.text_type + ): return self.template_source.decode( - self.module._source_encoding) + self.module._source_encoding + ) else: return self.template_source else: @@ -677,38 +698,46 @@ class ModuleInfo(object): def _compile(template, text, filename, generate_magic_comment): - lexer = template.lexer_cls(text, - filename, - disable_unicode=template.disable_unicode, - input_encoding=template.input_encoding, - preprocessor=template.preprocessor) + lexer = template.lexer_cls( + text, + filename, + disable_unicode=template.disable_unicode, + input_encoding=template.input_encoding, + preprocessor=template.preprocessor, + ) node = lexer.parse() - source = codegen.compile(node, - template.uri, - filename, - default_filters=template.default_filters, - buffer_filters=template.buffer_filters, - imports=template.imports, - future_imports=template.future_imports, - source_encoding=lexer.encoding, - generate_magic_comment=generate_magic_comment, - disable_unicode=template.disable_unicode, - strict_undefined=template.strict_undefined, - enable_loop=template.enable_loop, - reserved_names=template.reserved_names) + source = codegen.compile( + node, + template.uri, + filename, + default_filters=template.default_filters, + buffer_filters=template.buffer_filters, + imports=template.imports, + 
future_imports=template.future_imports, + source_encoding=lexer.encoding, + generate_magic_comment=generate_magic_comment, + disable_unicode=template.disable_unicode, + strict_undefined=template.strict_undefined, + enable_loop=template.enable_loop, + reserved_names=template.reserved_names, + ) return source, lexer def _compile_text(template, text, filename): identifier = template.module_id - source, lexer = _compile(template, text, filename, - generate_magic_comment=template.disable_unicode) + source, lexer = _compile( + template, + text, + filename, + generate_magic_comment=template.disable_unicode, + ) cid = identifier if not compat.py3k and isinstance(cid, compat.text_type): cid = cid.encode() module = types.ModuleType(cid) - code = compile(source, cid, 'exec') + code = compile(source, cid, "exec") # this exec() works for 2.4->3.3. exec(code, module.__dict__, module.__dict__) @@ -716,11 +745,12 @@ def _compile_text(template, text, filename): def _compile_module_file(template, text, filename, outputpath, module_writer): - source, lexer = _compile(template, text, filename, - generate_magic_comment=True) + source, lexer = _compile( + template, text, filename, generate_magic_comment=True + ) if isinstance(source, compat.text_type): - source = source.encode(lexer.encoding or 'ascii') + source = source.encode(lexer.encoding or "ascii") if module_writer: module_writer(source, outputpath) @@ -737,9 +767,9 @@ def _compile_module_file(template, text, filename, outputpath, module_writer): def _get_module_info_from_callable(callable_): if compat.py3k: - return _get_module_info(callable_.__globals__['__name__']) + return _get_module_info(callable_.__globals__["__name__"]) else: - return _get_module_info(callable_.func_globals['__name__']) + return _get_module_info(callable_.func_globals["__name__"]) def _get_module_info(filename): diff --git a/mako/util.py b/mako/util.py index 2f089ff..cee911b 100644 --- a/mako/util.py +++ b/mako/util.py @@ -4,12 +4,13 @@ # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php -import re -import collections import codecs +import collections +import operator import os +import re + from mako import compat -import operator def update_wrapper(decorated, fn): @@ -19,7 +20,6 @@ def update_wrapper(decorated, fn): class PluginLoader(object): - def __init__(self, group): self.group = group self.impls = {} @@ -29,16 +29,16 @@ class PluginLoader(object): return self.impls[name]() else: import pkg_resources - for impl in pkg_resources.iter_entry_points( - self.group, - name): + + for impl in pkg_resources.iter_entry_points(self.group, name): self.impls[name] = impl.load return impl.load() else: from mako import exceptions + raise exceptions.RuntimeException( - "Can't load plugin %s %s" % - (self.group, name)) + "Can't load plugin %s %s" % (self.group, name) + ) def register(self, name, modulepath, objname): def load(): @@ -46,18 +46,19 @@ class PluginLoader(object): for token in modulepath.split(".")[1:]: mod = getattr(mod, token) return getattr(mod, objname) + self.impls[name] = load -def verify_directory(dir): +def verify_directory(dir_): """create and/or verify a filesystem directory.""" tries = 0 - while not os.path.exists(dir): + while not os.path.exists(dir_): try: tries += 1 - os.makedirs(dir, compat.octal("0775")) + os.makedirs(dir_, compat.octal("0775")) except: if tries > 5: raise @@ -109,11 +110,15 @@ class memoized_instancemethod(object): def oneshot(*args, **kw): result = self.fget(obj, *args, **kw) 
- memo = lambda *a, **kw: result + + def memo(*a, **kw): + return result + memo.__name__ = self.__name__ memo.__doc__ = self.__doc__ obj.__dict__[self.__name__] = memo return result + oneshot.__name__ = self.__name__ oneshot.__doc__ = self.__doc__ return oneshot @@ -137,13 +142,13 @@ class FastEncodingBuffer(object): """a very rudimentary buffer that is faster than StringIO, but doesn't crash on unicode data like cStringIO.""" - def __init__(self, encoding=None, errors='strict', as_unicode=False): + def __init__(self, encoding=None, errors="strict", as_unicode=False): self.data = collections.deque() self.encoding = encoding if as_unicode: - self.delim = compat.u('') + self.delim = compat.u("") else: - self.delim = '' + self.delim = "" self.as_unicode = as_unicode self.errors = errors self.write = self.data.append @@ -154,8 +159,9 @@ class FastEncodingBuffer(object): def getvalue(self): if self.encoding: - return self.delim.join(self.data).encode(self.encoding, - self.errors) + return self.delim.join(self.data).encode( + self.encoding, self.errors + ) else: return self.delim.join(self.data) @@ -171,7 +177,6 @@ class LRUCache(dict): """ class _Item(object): - def __init__(self, key, value): self.key = key self.value = value @@ -180,7 +185,7 @@ class LRUCache(dict): def __repr__(self): return repr(self.value) - def __init__(self, capacity, threshold=.5): + def __init__(self, capacity, threshold=0.5): self.capacity = capacity self.threshold = threshold @@ -210,9 +215,12 @@ class LRUCache(dict): def _manage_size(self): while len(self) > self.capacity + self.capacity * self.threshold: - bytime = sorted(dict.values(self), - key=operator.attrgetter('timestamp'), reverse=True) - for item in bytime[self.capacity:]: + bytime = sorted( + dict.values(self), + key=operator.attrgetter("timestamp"), + reverse=True, + ) + for item in bytime[self.capacity :]: try: del self[item.key] except KeyError: @@ -220,10 +228,11 @@ class LRUCache(dict): # broke in on us. 
loop around and try again break + # Regexp to match python magic encoding line _PYTHON_MAGIC_COMMENT_re = re.compile( - r'[ \t\f]* \# .* coding[=:][ \t]*([-\w.]+)', - re.VERBOSE) + r"[ \t\f]* \# .* coding[=:][ \t]*([-\w.]+)", re.VERBOSE +) def parse_encoding(fp): @@ -242,13 +251,14 @@ def parse_encoding(fp): line1 = fp.readline() has_bom = line1.startswith(codecs.BOM_UTF8) if has_bom: - line1 = line1[len(codecs.BOM_UTF8):] + line1 = line1[len(codecs.BOM_UTF8) :] - m = _PYTHON_MAGIC_COMMENT_re.match(line1.decode('ascii', 'ignore')) + m = _PYTHON_MAGIC_COMMENT_re.match(line1.decode("ascii", "ignore")) if not m: try: import parser - parser.suite(line1.decode('ascii', 'ignore')) + + parser.suite(line1.decode("ascii", "ignore")) except (ImportError, SyntaxError): # Either it's a real syntax error, in which case the source # is not valid python source, or line2 is a continuation of @@ -258,14 +268,16 @@ def parse_encoding(fp): else: line2 = fp.readline() m = _PYTHON_MAGIC_COMMENT_re.match( - line2.decode('ascii', 'ignore')) + line2.decode("ascii", "ignore") + ) if has_bom: if m: raise SyntaxError( "python refuses to compile code with both a UTF8" - " byte-order-mark and a magic encoding comment") - return 'utf_8' + " byte-order-mark and a magic encoding comment" + ) + return "utf_8" elif m: return m.group(1) else: @@ -289,10 +301,11 @@ def restore__ast(_ast): """Attempt to restore the required classes to the _ast module if it appears to be missing them """ - if hasattr(_ast, 'AST'): + if hasattr(_ast, "AST"): return _ast.PyCF_ONLY_AST = 2 << 9 - m = compile("""\ + m = compile( + """\ def foo(): pass class Bar(object): pass if False: pass @@ -305,13 +318,17 @@ baz = 'mako' baz and 'foo' or 'bar' (mako is baz == baz) is not baz != mako mako > baz < mako >= baz <= mako -mako in baz not in mako""", '', 'exec', _ast.PyCF_ONLY_AST) +mako in baz not in mako""", + "", + "exec", + _ast.PyCF_ONLY_AST, + ) _ast.Module = type(m) for cls in _ast.Module.__mro__: - if cls.__name__ == 'mod': + if cls.__name__ == "mod": _ast.mod = cls - elif cls.__name__ == 'AST': + elif cls.__name__ == "AST": _ast.AST = cls _ast.FunctionDef = type(m.body[0]) @@ -361,7 +378,7 @@ mako in baz not in mako""", '', 'exec', _ast.PyCF_ONLY_AST) _ast.NotIn = type(m.body[12].value.ops[1]) -def read_file(path, mode='rb'): +def read_file(path, mode="rb"): fp = open(path, mode) try: data = fp.read() -- cgit v1.2.1
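
Editor's note: the hunks above are mechanical re-formatting only (double-quoted strings, one import per line, black-style argument wrapping, renames such as dir -> dir_); the public API they touch is unchanged. As a quick illustrative sketch -- not part of the patch -- the following exercises the TemplateLookup and Template entry points that appear in the lookup.py and template.py hunks, written in the import/quoting style the diff adopts. The directory paths and template text are hypothetical.

    # Illustrative sketch only; paths below are placeholders, not values from the patch.
    from mako.lookup import TemplateLookup
    from mako.template import Template

    lookup = TemplateLookup(
        directories=["/path/to/templates"],     # hypothetical search path
        module_directory="/tmp/mako_modules",   # hypothetical compiled-module cache
        input_encoding="utf-8",
    )

    # put_string() registers an in-memory template under a URI (see the
    # lookup.py hunk above); get_template() then resolves that URI.
    lookup.put_string("/hello.txt", "hello, ${name}!")
    template = lookup.get_template("/hello.txt")
    print(template.render(name="world"))

    # A standalone Template works the same way without a lookup.
    print(Template("${x} + ${y} = ${x + y}").render(x=2, y=3))
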