From 3d57e14df7ba5f14a634295caf3b2e60da50bfe2 Mon Sep 17 00:00:00 2001
From: James Taylor
Date: Fri, 6 Sep 2019 16:31:13 -0700
Subject: Remove windows python distribution from repo and add requirements.txt

---
 python/jinja2/__init__.py     |   83 --
 python/jinja2/_compat.py      |   99 ---
 python/jinja2/_identifier.py  |    2 -
 python/jinja2/asyncfilters.py |  146 ----
 python/jinja2/asyncsupport.py |  256 ------
 python/jinja2/bccache.py      |  362 ---------
 python/jinja2/compiler.py     | 1721 -----------------------------------------
 python/jinja2/constants.py    |   32 -
 python/jinja2/debug.py        |  372 ---------
 python/jinja2/defaults.py     |   56 --
 python/jinja2/environment.py  | 1276 ------------------------------
 python/jinja2/exceptions.py   |  146 ----
 python/jinja2/ext.py          |  627 ---------------
 python/jinja2/filters.py      | 1190 ----------------------------
 python/jinja2/idtracking.py   |  286 -------
 python/jinja2/lexer.py        |  739 ------------------
 python/jinja2/loaders.py      |  481 ------------
 python/jinja2/meta.py         |  106 ---
 python/jinja2/nativetypes.py  |  220 ------
 python/jinja2/nodes.py        |  999 ------------------------
 python/jinja2/optimizer.py    |   49 --
 python/jinja2/parser.py       |  903 ---------------------
 python/jinja2/runtime.py      |  813 -------------------
 python/jinja2/sandbox.py      |  486 ------------
 python/jinja2/tests.py        |  175 -----
 python/jinja2/utils.py        |  647 ----------------
 python/jinja2/visitor.py      |   87 ---
 27 files changed, 12359 deletions(-)
 delete mode 100644 python/jinja2/__init__.py
 delete mode 100644 python/jinja2/_compat.py
 delete mode 100644 python/jinja2/_identifier.py
 delete mode 100644 python/jinja2/asyncfilters.py
 delete mode 100644 python/jinja2/asyncsupport.py
 delete mode 100644 python/jinja2/bccache.py
 delete mode 100644 python/jinja2/compiler.py
 delete mode 100644 python/jinja2/constants.py
 delete mode 100644 python/jinja2/debug.py
 delete mode 100644 python/jinja2/defaults.py
 delete mode 100644 python/jinja2/environment.py
 delete mode 100644 python/jinja2/exceptions.py
 delete mode 100644 python/jinja2/ext.py
 delete mode 100644 python/jinja2/filters.py
 delete mode 100644 python/jinja2/idtracking.py
 delete mode 100644 python/jinja2/lexer.py
 delete mode 100644 python/jinja2/loaders.py
 delete mode 100644 python/jinja2/meta.py
 delete mode 100644 python/jinja2/nativetypes.py
 delete mode 100644 python/jinja2/nodes.py
 delete mode 100644 python/jinja2/optimizer.py
 delete mode 100644 python/jinja2/parser.py
 delete mode 100644 python/jinja2/runtime.py
 delete mode 100644 python/jinja2/sandbox.py
 delete mode 100644 python/jinja2/tests.py
 delete mode 100644 python/jinja2/utils.py
 delete mode 100644 python/jinja2/visitor.py

diff --git a/python/jinja2/__init__.py b/python/jinja2/__init__.py
deleted file mode 100644
index 15e13b6..0000000
--- a/python/jinja2/__init__.py
+++ /dev/null
@@ -1,83 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
- jinja2
- ~~~~~~
-
- Jinja2 is a template engine written in pure Python. It provides a
- Django inspired non-XML syntax but supports inline expressions and
- an optional sandboxed environment.
-
- Nutshell
- --------
-
- Here a small example of a Jinja2 template::
-
- {% extends 'base.html' %}
- {% block title %}Memberlist{% endblock %}
- {% block content %}
-
- {% endblock %}
-
-
- :copyright: (c) 2017 by the Jinja Team.
- :license: BSD, see LICENSE for more details.
-""" -__docformat__ = 'restructuredtext en' -__version__ = '2.10.1' - -# high level interface -from jinja2.environment import Environment, Template - -# loaders -from jinja2.loaders import BaseLoader, FileSystemLoader, PackageLoader, \ - DictLoader, FunctionLoader, PrefixLoader, ChoiceLoader, \ - ModuleLoader - -# bytecode caches -from jinja2.bccache import BytecodeCache, FileSystemBytecodeCache, \ - MemcachedBytecodeCache - -# undefined types -from jinja2.runtime import Undefined, DebugUndefined, StrictUndefined, \ - make_logging_undefined - -# exceptions -from jinja2.exceptions import TemplateError, UndefinedError, \ - TemplateNotFound, TemplatesNotFound, TemplateSyntaxError, \ - TemplateAssertionError, TemplateRuntimeError - -# decorators and public utilities -from jinja2.filters import environmentfilter, contextfilter, \ - evalcontextfilter -from jinja2.utils import Markup, escape, clear_caches, \ - environmentfunction, evalcontextfunction, contextfunction, \ - is_undefined, select_autoescape - -__all__ = [ - 'Environment', 'Template', 'BaseLoader', 'FileSystemLoader', - 'PackageLoader', 'DictLoader', 'FunctionLoader', 'PrefixLoader', - 'ChoiceLoader', 'BytecodeCache', 'FileSystemBytecodeCache', - 'MemcachedBytecodeCache', 'Undefined', 'DebugUndefined', - 'StrictUndefined', 'TemplateError', 'UndefinedError', 'TemplateNotFound', - 'TemplatesNotFound', 'TemplateSyntaxError', 'TemplateAssertionError', - 'TemplateRuntimeError', - 'ModuleLoader', 'environmentfilter', 'contextfilter', 'Markup', 'escape', - 'environmentfunction', 'contextfunction', 'clear_caches', 'is_undefined', - 'evalcontextfilter', 'evalcontextfunction', 'make_logging_undefined', - 'select_autoescape', -] - - -def _patch_async(): - from jinja2.utils import have_async_gen - if have_async_gen: - from jinja2.asyncsupport import patch_all - patch_all() - - -_patch_async() -del _patch_async diff --git a/python/jinja2/_compat.py b/python/jinja2/_compat.py deleted file mode 100644 index 61d8530..0000000 --- a/python/jinja2/_compat.py +++ /dev/null @@ -1,99 +0,0 @@ -# -*- coding: utf-8 -*- -""" - jinja2._compat - ~~~~~~~~~~~~~~ - - Some py2/py3 compatibility support based on a stripped down - version of six so we don't have to depend on a specific version - of it. - - :copyright: Copyright 2013 by the Jinja team, see AUTHORS. - :license: BSD, see LICENSE for details. 
-""" -import sys - -PY2 = sys.version_info[0] == 2 -PYPY = hasattr(sys, 'pypy_translation_info') -_identity = lambda x: x - - -if not PY2: - unichr = chr - range_type = range - text_type = str - string_types = (str,) - integer_types = (int,) - - iterkeys = lambda d: iter(d.keys()) - itervalues = lambda d: iter(d.values()) - iteritems = lambda d: iter(d.items()) - - import pickle - from io import BytesIO, StringIO - NativeStringIO = StringIO - - def reraise(tp, value, tb=None): - if value.__traceback__ is not tb: - raise value.with_traceback(tb) - raise value - - ifilter = filter - imap = map - izip = zip - intern = sys.intern - - implements_iterator = _identity - implements_to_string = _identity - encode_filename = _identity - -else: - unichr = unichr - text_type = unicode - range_type = xrange - string_types = (str, unicode) - integer_types = (int, long) - - iterkeys = lambda d: d.iterkeys() - itervalues = lambda d: d.itervalues() - iteritems = lambda d: d.iteritems() - - import cPickle as pickle - from cStringIO import StringIO as BytesIO, StringIO - NativeStringIO = BytesIO - - exec('def reraise(tp, value, tb=None):\n raise tp, value, tb') - - from itertools import imap, izip, ifilter - intern = intern - - def implements_iterator(cls): - cls.next = cls.__next__ - del cls.__next__ - return cls - - def implements_to_string(cls): - cls.__unicode__ = cls.__str__ - cls.__str__ = lambda x: x.__unicode__().encode('utf-8') - return cls - - def encode_filename(filename): - if isinstance(filename, unicode): - return filename.encode('utf-8') - return filename - - -def with_metaclass(meta, *bases): - """Create a base class with a metaclass.""" - # This requires a bit of explanation: the basic idea is to make a - # dummy metaclass for one level of class instantiation that replaces - # itself with the actual metaclass. 
- class metaclass(type): - def __new__(cls, name, this_bases, d): - return meta(name, bases, d) - return type.__new__(metaclass, 'temporary_class', (), {}) - - -try: - from urllib.parse import quote_from_bytes as url_quote -except ImportError: - from urllib import quote as url_quote diff --git a/python/jinja2/_identifier.py b/python/jinja2/_identifier.py deleted file mode 100644 index 2eac35d..0000000 --- a/python/jinja2/_identifier.py +++ /dev/null @@ -1,2 +0,0 @@ -# generated by scripts/generate_identifier_pattern.py -pattern = '·̀-ͯ·҃-֑҇-ׇֽֿׁׂׅׄؐ-ًؚ-ٰٟۖ-ۜ۟-۪ۤۧۨ-ܑۭܰ-݊ަ-ް߫-߳ࠖ-࠙ࠛ-ࠣࠥ-ࠧࠩ-࡙࠭-࡛ࣔ-ࣣ࣡-ःऺ-़ा-ॏ॑-ॗॢॣঁ-ঃ়া-ৄেৈো-্ৗৢৣਁ-ਃ਼ਾ-ੂੇੈੋ-੍ੑੰੱੵઁ-ઃ઼ા-ૅે-ૉો-્ૢૣଁ-ଃ଼ା-ୄେୈୋ-୍ୖୗୢୣஂா-ூெ-ைொ-்ௗఀ-ఃా-ౄె-ైొ-్ౕౖౢౣಁ-ಃ಼ಾ-ೄೆ-ೈೊ-್ೕೖೢೣഁ-ഃാ-ൄെ-ൈൊ-്ൗൢൣංඃ්ා-ුූෘ-ෟෲෳัิ-ฺ็-๎ັິ-ູົຼ່-ໍ༹༘༙༵༷༾༿ཱ-྄྆྇ྍ-ྗྙ-ྼ࿆ါ-ှၖ-ၙၞ-ၠၢ-ၤၧ-ၭၱ-ၴႂ-ႍႏႚ-ႝ፝-፟ᜒ-᜔ᜲ-᜴ᝒᝓᝲᝳ឴-៓៝᠋-᠍ᢅᢆᢩᤠ-ᤫᤰ-᤻ᨗ-ᨛᩕ-ᩞ᩠-᩿᩼᪰-᪽ᬀ-ᬄ᬴-᭄᭫-᭳ᮀ-ᮂᮡ-ᮭ᯦-᯳ᰤ-᰷᳐-᳔᳒-᳨᳭ᳲ-᳴᳸᳹᷀-᷵᷻-᷿‿⁀⁔⃐-⃥⃜⃡-⃰℘℮⳯-⵿⳱ⷠ-〪ⷿ-゙゚〯꙯ꙴ-꙽ꚞꚟ꛰꛱ꠂ꠆ꠋꠣ-ꠧꢀꢁꢴ-ꣅ꣠-꣱ꤦ-꤭ꥇ-꥓ꦀ-ꦃ꦳-꧀ꧥꨩ-ꨶꩃꩌꩍꩻ-ꩽꪰꪲ-ꪴꪷꪸꪾ꪿꫁ꫫ-ꫯꫵ꫶ꯣ-ꯪ꯬꯭ﬞ︀-️︠-︯︳︴﹍-﹏_𐇽𐋠𐍶-𐍺𐨁-𐨃𐨅𐨆𐨌-𐨏𐨸-𐨿𐨺𐫦𐫥𑀀-𑀂𑀸-𑁆𑁿-𑂂𑂰-𑂺𑄀-𑄂𑄧-𑅳𑄴𑆀-𑆂𑆳-𑇊𑇀-𑇌𑈬-𑈷𑈾𑋟-𑋪𑌀-𑌃𑌼𑌾-𑍄𑍇𑍈𑍋-𑍍𑍗𑍢𑍣𑍦-𑍬𑍰-𑍴𑐵-𑑆𑒰-𑓃𑖯-𑖵𑖸-𑗀𑗜𑗝𑘰-𑙀𑚫-𑚷𑜝-𑜫𑰯-𑰶𑰸-𑰿𑲒-𑲧𑲩-𑲶𖫰-𖫴𖬰-𖬶𖽑-𖽾𖾏-𖾒𛲝𛲞𝅥-𝅩𝅭-𝅲𝅻-𝆂𝆅-𝆋𝆪-𝆭𝉂-𝉄𝨀-𝨶𝨻-𝩬𝩵𝪄𝪛-𝪟𝪡-𝪯𞀀-𞀆𞀈-𞀘𞀛-𞀡𞀣𞀤𞀦-𞣐𞀪-𞣖𞥄-𞥊󠄀-󠇯' diff --git a/python/jinja2/asyncfilters.py b/python/jinja2/asyncfilters.py deleted file mode 100644 index 5c1f46d..0000000 --- a/python/jinja2/asyncfilters.py +++ /dev/null @@ -1,146 +0,0 @@ -from functools import wraps - -from jinja2.asyncsupport import auto_aiter -from jinja2 import filters - - -async def auto_to_seq(value): - seq = [] - if hasattr(value, '__aiter__'): - async for item in value: - seq.append(item) - else: - for item in value: - seq.append(item) - return seq - - -async def async_select_or_reject(args, kwargs, modfunc, lookup_attr): - seq, func = filters.prepare_select_or_reject( - args, kwargs, modfunc, lookup_attr) - if seq: - async for item in auto_aiter(seq): - if func(item): - yield item - - -def dualfilter(normal_filter, async_filter): - wrap_evalctx = False - if getattr(normal_filter, 'environmentfilter', False): - is_async = lambda args: args[0].is_async - wrap_evalctx = False - else: - if not getattr(normal_filter, 'evalcontextfilter', False) and \ - not getattr(normal_filter, 'contextfilter', False): - wrap_evalctx = True - is_async = lambda args: args[0].environment.is_async - - @wraps(normal_filter) - def wrapper(*args, **kwargs): - b = is_async(args) - if wrap_evalctx: - args = args[1:] - if b: - return async_filter(*args, **kwargs) - return normal_filter(*args, **kwargs) - - if wrap_evalctx: - wrapper.evalcontextfilter = True - - wrapper.asyncfiltervariant = True - - return wrapper - - -def asyncfiltervariant(original): - def decorator(f): - return dualfilter(original, f) - return decorator - - -@asyncfiltervariant(filters.do_first) -async def do_first(environment, seq): - try: - return await auto_aiter(seq).__anext__() - except StopAsyncIteration: - return environment.undefined('No first item, sequence was empty.') - - -@asyncfiltervariant(filters.do_groupby) -async def do_groupby(environment, value, attribute): - expr = filters.make_attrgetter(environment, attribute) - return [filters._GroupTuple(key, await auto_to_seq(values)) - for key, values in filters.groupby(sorted( - await auto_to_seq(value), key=expr), expr)] - - -@asyncfiltervariant(filters.do_join) -async def do_join(eval_ctx, value, d=u'', attribute=None): - return filters.do_join(eval_ctx, await auto_to_seq(value), d, attribute) - - -@asyncfiltervariant(filters.do_list) -async def do_list(value): - return await auto_to_seq(value) - - 
-@asyncfiltervariant(filters.do_reject) -async def do_reject(*args, **kwargs): - return async_select_or_reject(args, kwargs, lambda x: not x, False) - - -@asyncfiltervariant(filters.do_rejectattr) -async def do_rejectattr(*args, **kwargs): - return async_select_or_reject(args, kwargs, lambda x: not x, True) - - -@asyncfiltervariant(filters.do_select) -async def do_select(*args, **kwargs): - return async_select_or_reject(args, kwargs, lambda x: x, False) - - -@asyncfiltervariant(filters.do_selectattr) -async def do_selectattr(*args, **kwargs): - return async_select_or_reject(args, kwargs, lambda x: x, True) - - -@asyncfiltervariant(filters.do_map) -async def do_map(*args, **kwargs): - seq, func = filters.prepare_map(args, kwargs) - if seq: - async for item in auto_aiter(seq): - yield func(item) - - -@asyncfiltervariant(filters.do_sum) -async def do_sum(environment, iterable, attribute=None, start=0): - rv = start - if attribute is not None: - func = filters.make_attrgetter(environment, attribute) - else: - func = lambda x: x - async for item in auto_aiter(iterable): - rv += func(item) - return rv - - -@asyncfiltervariant(filters.do_slice) -async def do_slice(value, slices, fill_with=None): - return filters.do_slice(await auto_to_seq(value), slices, fill_with) - - -ASYNC_FILTERS = { - 'first': do_first, - 'groupby': do_groupby, - 'join': do_join, - 'list': do_list, - # we intentionally do not support do_last because that would be - # ridiculous - 'reject': do_reject, - 'rejectattr': do_rejectattr, - 'map': do_map, - 'select': do_select, - 'selectattr': do_selectattr, - 'sum': do_sum, - 'slice': do_slice, -} diff --git a/python/jinja2/asyncsupport.py b/python/jinja2/asyncsupport.py deleted file mode 100644 index b1e7b5c..0000000 --- a/python/jinja2/asyncsupport.py +++ /dev/null @@ -1,256 +0,0 @@ -# -*- coding: utf-8 -*- -""" - jinja2.asyncsupport - ~~~~~~~~~~~~~~~~~~~ - - Has all the code for async support which is implemented as a patch - for supported Python versions. - - :copyright: (c) 2017 by the Jinja Team. - :license: BSD, see LICENSE for more details. 
-""" -import sys -import asyncio -import inspect -from functools import update_wrapper - -from jinja2.utils import concat, internalcode, Markup -from jinja2.environment import TemplateModule -from jinja2.runtime import LoopContextBase, _last_iteration - - -async def concat_async(async_gen): - rv = [] - async def collect(): - async for event in async_gen: - rv.append(event) - await collect() - return concat(rv) - - -async def generate_async(self, *args, **kwargs): - vars = dict(*args, **kwargs) - try: - async for event in self.root_render_func(self.new_context(vars)): - yield event - except Exception: - exc_info = sys.exc_info() - else: - return - yield self.environment.handle_exception(exc_info, True) - - -def wrap_generate_func(original_generate): - def _convert_generator(self, loop, args, kwargs): - async_gen = self.generate_async(*args, **kwargs) - try: - while 1: - yield loop.run_until_complete(async_gen.__anext__()) - except StopAsyncIteration: - pass - def generate(self, *args, **kwargs): - if not self.environment.is_async: - return original_generate(self, *args, **kwargs) - return _convert_generator(self, asyncio.get_event_loop(), args, kwargs) - return update_wrapper(generate, original_generate) - - -async def render_async(self, *args, **kwargs): - if not self.environment.is_async: - raise RuntimeError('The environment was not created with async mode ' - 'enabled.') - - vars = dict(*args, **kwargs) - ctx = self.new_context(vars) - - try: - return await concat_async(self.root_render_func(ctx)) - except Exception: - exc_info = sys.exc_info() - return self.environment.handle_exception(exc_info, True) - - -def wrap_render_func(original_render): - def render(self, *args, **kwargs): - if not self.environment.is_async: - return original_render(self, *args, **kwargs) - loop = asyncio.get_event_loop() - return loop.run_until_complete(self.render_async(*args, **kwargs)) - return update_wrapper(render, original_render) - - -def wrap_block_reference_call(original_call): - @internalcode - async def async_call(self): - rv = await concat_async(self._stack[self._depth](self._context)) - if self._context.eval_ctx.autoescape: - rv = Markup(rv) - return rv - - @internalcode - def __call__(self): - if not self._context.environment.is_async: - return original_call(self) - return async_call(self) - - return update_wrapper(__call__, original_call) - - -def wrap_macro_invoke(original_invoke): - @internalcode - async def async_invoke(self, arguments, autoescape): - rv = await self._func(*arguments) - if autoescape: - rv = Markup(rv) - return rv - - @internalcode - def _invoke(self, arguments, autoescape): - if not self._environment.is_async: - return original_invoke(self, arguments, autoescape) - return async_invoke(self, arguments, autoescape) - return update_wrapper(_invoke, original_invoke) - - -@internalcode -async def get_default_module_async(self): - if self._module is not None: - return self._module - self._module = rv = await self.make_module_async() - return rv - - -def wrap_default_module(original_default_module): - @internalcode - def _get_default_module(self): - if self.environment.is_async: - raise RuntimeError('Template module attribute is unavailable ' - 'in async mode') - return original_default_module(self) - return _get_default_module - - -async def make_module_async(self, vars=None, shared=False, locals=None): - context = self.new_context(vars, shared, locals) - body_stream = [] - async for item in self.root_render_func(context): - body_stream.append(item) - return TemplateModule(self, 
context, body_stream) - - -def patch_template(): - from jinja2 import Template - Template.generate = wrap_generate_func(Template.generate) - Template.generate_async = update_wrapper( - generate_async, Template.generate_async) - Template.render_async = update_wrapper( - render_async, Template.render_async) - Template.render = wrap_render_func(Template.render) - Template._get_default_module = wrap_default_module( - Template._get_default_module) - Template._get_default_module_async = get_default_module_async - Template.make_module_async = update_wrapper( - make_module_async, Template.make_module_async) - - -def patch_runtime(): - from jinja2.runtime import BlockReference, Macro - BlockReference.__call__ = wrap_block_reference_call( - BlockReference.__call__) - Macro._invoke = wrap_macro_invoke(Macro._invoke) - - -def patch_filters(): - from jinja2.filters import FILTERS - from jinja2.asyncfilters import ASYNC_FILTERS - FILTERS.update(ASYNC_FILTERS) - - -def patch_all(): - patch_template() - patch_runtime() - patch_filters() - - -async def auto_await(value): - if inspect.isawaitable(value): - return await value - return value - - -async def auto_aiter(iterable): - if hasattr(iterable, '__aiter__'): - async for item in iterable: - yield item - return - for item in iterable: - yield item - - -class AsyncLoopContext(LoopContextBase): - - def __init__(self, async_iterator, undefined, after, length, recurse=None, - depth0=0): - LoopContextBase.__init__(self, undefined, recurse, depth0) - self._async_iterator = async_iterator - self._after = after - self._length = length - - @property - def length(self): - if self._length is None: - raise TypeError('Loop length for some iterators cannot be ' - 'lazily calculated in async mode') - return self._length - - def __aiter__(self): - return AsyncLoopContextIterator(self) - - -class AsyncLoopContextIterator(object): - __slots__ = ('context',) - - def __init__(self, context): - self.context = context - - def __aiter__(self): - return self - - async def __anext__(self): - ctx = self.context - ctx.index0 += 1 - if ctx._after is _last_iteration: - raise StopAsyncIteration() - ctx._before = ctx._current - ctx._current = ctx._after - try: - ctx._after = await ctx._async_iterator.__anext__() - except StopAsyncIteration: - ctx._after = _last_iteration - return ctx._current, ctx - - -async def make_async_loop_context(iterable, undefined, recurse=None, depth0=0): - # Length is more complicated and less efficient in async mode. The - # reason for this is that we cannot know if length will be used - # upfront but because length is a property we cannot lazily execute it - # later. This means that we need to buffer it up and measure :( - # - # We however only do this for actual iterators, not for async - # iterators as blocking here does not seem like the best idea in the - # world. 
- try: - length = len(iterable) - except (TypeError, AttributeError): - if not hasattr(iterable, '__aiter__'): - iterable = tuple(iterable) - length = len(iterable) - else: - length = None - async_iterator = auto_aiter(iterable) - try: - after = await async_iterator.__anext__() - except StopAsyncIteration: - after = _last_iteration - return AsyncLoopContext(async_iterator, undefined, after, length, recurse, - depth0) diff --git a/python/jinja2/bccache.py b/python/jinja2/bccache.py deleted file mode 100644 index 080e527..0000000 --- a/python/jinja2/bccache.py +++ /dev/null @@ -1,362 +0,0 @@ -# -*- coding: utf-8 -*- -""" - jinja2.bccache - ~~~~~~~~~~~~~~ - - This module implements the bytecode cache system Jinja is optionally - using. This is useful if you have very complex template situations and - the compiliation of all those templates slow down your application too - much. - - Situations where this is useful are often forking web applications that - are initialized on the first request. - - :copyright: (c) 2017 by the Jinja Team. - :license: BSD. -""" -from os import path, listdir -import os -import sys -import stat -import errno -import marshal -import tempfile -import fnmatch -from hashlib import sha1 -from jinja2.utils import open_if_exists -from jinja2._compat import BytesIO, pickle, PY2, text_type - - -# marshal works better on 3.x, one hack less required -if not PY2: - marshal_dump = marshal.dump - marshal_load = marshal.load -else: - - def marshal_dump(code, f): - if isinstance(f, file): - marshal.dump(code, f) - else: - f.write(marshal.dumps(code)) - - def marshal_load(f): - if isinstance(f, file): - return marshal.load(f) - return marshal.loads(f.read()) - - -bc_version = 3 - -# magic version used to only change with new jinja versions. With 2.6 -# we change this to also take Python version changes into account. The -# reason for this is that Python tends to segfault if fed earlier bytecode -# versions because someone thought it would be a good idea to reuse opcodes -# or make Python incompatible with earlier versions. -bc_magic = 'j2'.encode('ascii') + \ - pickle.dumps(bc_version, 2) + \ - pickle.dumps((sys.version_info[0] << 24) | sys.version_info[1]) - - -class Bucket(object): - """Buckets are used to store the bytecode for one template. It's created - and initialized by the bytecode cache and passed to the loading functions. - - The buckets get an internal checksum from the cache assigned and use this - to automatically reject outdated cache material. Individual bytecode - cache subclasses don't have to care about cache invalidation. 
- """ - - def __init__(self, environment, key, checksum): - self.environment = environment - self.key = key - self.checksum = checksum - self.reset() - - def reset(self): - """Resets the bucket (unloads the bytecode).""" - self.code = None - - def load_bytecode(self, f): - """Loads bytecode from a file or file like object.""" - # make sure the magic header is correct - magic = f.read(len(bc_magic)) - if magic != bc_magic: - self.reset() - return - # the source code of the file changed, we need to reload - checksum = pickle.load(f) - if self.checksum != checksum: - self.reset() - return - # if marshal_load fails then we need to reload - try: - self.code = marshal_load(f) - except (EOFError, ValueError, TypeError): - self.reset() - return - - def write_bytecode(self, f): - """Dump the bytecode into the file or file like object passed.""" - if self.code is None: - raise TypeError('can\'t write empty bucket') - f.write(bc_magic) - pickle.dump(self.checksum, f, 2) - marshal_dump(self.code, f) - - def bytecode_from_string(self, string): - """Load bytecode from a string.""" - self.load_bytecode(BytesIO(string)) - - def bytecode_to_string(self): - """Return the bytecode as string.""" - out = BytesIO() - self.write_bytecode(out) - return out.getvalue() - - -class BytecodeCache(object): - """To implement your own bytecode cache you have to subclass this class - and override :meth:`load_bytecode` and :meth:`dump_bytecode`. Both of - these methods are passed a :class:`~jinja2.bccache.Bucket`. - - A very basic bytecode cache that saves the bytecode on the file system:: - - from os import path - - class MyCache(BytecodeCache): - - def __init__(self, directory): - self.directory = directory - - def load_bytecode(self, bucket): - filename = path.join(self.directory, bucket.key) - if path.exists(filename): - with open(filename, 'rb') as f: - bucket.load_bytecode(f) - - def dump_bytecode(self, bucket): - filename = path.join(self.directory, bucket.key) - with open(filename, 'wb') as f: - bucket.write_bytecode(f) - - A more advanced version of a filesystem based bytecode cache is part of - Jinja2. - """ - - def load_bytecode(self, bucket): - """Subclasses have to override this method to load bytecode into a - bucket. If they are not able to find code in the cache for the - bucket, it must not do anything. - """ - raise NotImplementedError() - - def dump_bytecode(self, bucket): - """Subclasses have to override this method to write the bytecode - from a bucket back to the cache. If it unable to do so it must not - fail silently but raise an exception. - """ - raise NotImplementedError() - - def clear(self): - """Clears the cache. This method is not used by Jinja2 but should be - implemented to allow applications to clear the bytecode cache used - by a particular environment. - """ - - def get_cache_key(self, name, filename=None): - """Returns the unique hash key for this template name.""" - hash = sha1(name.encode('utf-8')) - if filename is not None: - filename = '|' + filename - if isinstance(filename, text_type): - filename = filename.encode('utf-8') - hash.update(filename) - return hash.hexdigest() - - def get_source_checksum(self, source): - """Returns a checksum for the source.""" - return sha1(source.encode('utf-8')).hexdigest() - - def get_bucket(self, environment, name, filename, source): - """Return a cache bucket for the given template. All arguments are - mandatory but filename may be `None`. 
- """ - key = self.get_cache_key(name, filename) - checksum = self.get_source_checksum(source) - bucket = Bucket(environment, key, checksum) - self.load_bytecode(bucket) - return bucket - - def set_bucket(self, bucket): - """Put the bucket into the cache.""" - self.dump_bytecode(bucket) - - -class FileSystemBytecodeCache(BytecodeCache): - """A bytecode cache that stores bytecode on the filesystem. It accepts - two arguments: The directory where the cache items are stored and a - pattern string that is used to build the filename. - - If no directory is specified a default cache directory is selected. On - Windows the user's temp directory is used, on UNIX systems a directory - is created for the user in the system temp directory. - - The pattern can be used to have multiple separate caches operate on the - same directory. The default pattern is ``'__jinja2_%s.cache'``. ``%s`` - is replaced with the cache key. - - >>> bcc = FileSystemBytecodeCache('/tmp/jinja_cache', '%s.cache') - - This bytecode cache supports clearing of the cache using the clear method. - """ - - def __init__(self, directory=None, pattern='__jinja2_%s.cache'): - if directory is None: - directory = self._get_default_cache_dir() - self.directory = directory - self.pattern = pattern - - def _get_default_cache_dir(self): - def _unsafe_dir(): - raise RuntimeError('Cannot determine safe temp directory. You ' - 'need to explicitly provide one.') - - tmpdir = tempfile.gettempdir() - - # On windows the temporary directory is used specific unless - # explicitly forced otherwise. We can just use that. - if os.name == 'nt': - return tmpdir - if not hasattr(os, 'getuid'): - _unsafe_dir() - - dirname = '_jinja2-cache-%d' % os.getuid() - actual_dir = os.path.join(tmpdir, dirname) - - try: - os.mkdir(actual_dir, stat.S_IRWXU) - except OSError as e: - if e.errno != errno.EEXIST: - raise - try: - os.chmod(actual_dir, stat.S_IRWXU) - actual_dir_stat = os.lstat(actual_dir) - if actual_dir_stat.st_uid != os.getuid() \ - or not stat.S_ISDIR(actual_dir_stat.st_mode) \ - or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU: - _unsafe_dir() - except OSError as e: - if e.errno != errno.EEXIST: - raise - - actual_dir_stat = os.lstat(actual_dir) - if actual_dir_stat.st_uid != os.getuid() \ - or not stat.S_ISDIR(actual_dir_stat.st_mode) \ - or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU: - _unsafe_dir() - - return actual_dir - - def _get_cache_filename(self, bucket): - return path.join(self.directory, self.pattern % bucket.key) - - def load_bytecode(self, bucket): - f = open_if_exists(self._get_cache_filename(bucket), 'rb') - if f is not None: - try: - bucket.load_bytecode(f) - finally: - f.close() - - def dump_bytecode(self, bucket): - f = open(self._get_cache_filename(bucket), 'wb') - try: - bucket.write_bytecode(f) - finally: - f.close() - - def clear(self): - # imported lazily here because google app-engine doesn't support - # write access on the file system and the function does not exist - # normally. - from os import remove - files = fnmatch.filter(listdir(self.directory), self.pattern % '*') - for filename in files: - try: - remove(path.join(self.directory, filename)) - except OSError: - pass - - -class MemcachedBytecodeCache(BytecodeCache): - """This class implements a bytecode cache that uses a memcache cache for - storing the information. It does not enforce a specific memcache library - (tummy's memcache or cmemcache) but will accept any class that provides - the minimal interface required. 
- - Libraries compatible with this class: - - - `werkzeug `_.contrib.cache - - `python-memcached `_ - - `cmemcache `_ - - (Unfortunately the django cache interface is not compatible because it - does not support storing binary data, only unicode. You can however pass - the underlying cache client to the bytecode cache which is available - as `django.core.cache.cache._client`.) - - The minimal interface for the client passed to the constructor is this: - - .. class:: MinimalClientInterface - - .. method:: set(key, value[, timeout]) - - Stores the bytecode in the cache. `value` is a string and - `timeout` the timeout of the key. If timeout is not provided - a default timeout or no timeout should be assumed, if it's - provided it's an integer with the number of seconds the cache - item should exist. - - .. method:: get(key) - - Returns the value for the cache key. If the item does not - exist in the cache the return value must be `None`. - - The other arguments to the constructor are the prefix for all keys that - is added before the actual cache key and the timeout for the bytecode in - the cache system. We recommend a high (or no) timeout. - - This bytecode cache does not support clearing of used items in the cache. - The clear method is a no-operation function. - - .. versionadded:: 2.7 - Added support for ignoring memcache errors through the - `ignore_memcache_errors` parameter. - """ - - def __init__(self, client, prefix='jinja2/bytecode/', timeout=None, - ignore_memcache_errors=True): - self.client = client - self.prefix = prefix - self.timeout = timeout - self.ignore_memcache_errors = ignore_memcache_errors - - def load_bytecode(self, bucket): - try: - code = self.client.get(self.prefix + bucket.key) - except Exception: - if not self.ignore_memcache_errors: - raise - code = None - if code is not None: - bucket.bytecode_from_string(code) - - def dump_bytecode(self, bucket): - args = (self.prefix + bucket.key, bucket.bytecode_to_string()) - if self.timeout is not None: - args += (self.timeout,) - try: - self.client.set(*args) - except Exception: - if not self.ignore_memcache_errors: - raise diff --git a/python/jinja2/compiler.py b/python/jinja2/compiler.py deleted file mode 100644 index d534a82..0000000 --- a/python/jinja2/compiler.py +++ /dev/null @@ -1,1721 +0,0 @@ -# -*- coding: utf-8 -*- -""" - jinja2.compiler - ~~~~~~~~~~~~~~~ - - Compiles nodes into python code. - - :copyright: (c) 2017 by the Jinja Team. - :license: BSD, see LICENSE for more details. -""" -from itertools import chain -from copy import deepcopy -from keyword import iskeyword as is_python_keyword -from functools import update_wrapper -from jinja2 import nodes -from jinja2.nodes import EvalContext -from jinja2.visitor import NodeVisitor -from jinja2.optimizer import Optimizer -from jinja2.exceptions import TemplateAssertionError -from jinja2.utils import Markup, concat, escape -from jinja2._compat import range_type, text_type, string_types, \ - iteritems, NativeStringIO, imap, izip -from jinja2.idtracking import Symbols, VAR_LOAD_PARAMETER, \ - VAR_LOAD_RESOLVE, VAR_LOAD_ALIAS, VAR_LOAD_UNDEFINED - - -operators = { - 'eq': '==', - 'ne': '!=', - 'gt': '>', - 'gteq': '>=', - 'lt': '<', - 'lteq': '<=', - 'in': 'in', - 'notin': 'not in' -} - -# what method to iterate over items do we want to use for dict iteration -# in generated code? 
on 2.x let's go with iteritems, on 3.x with items -if hasattr(dict, 'iteritems'): - dict_item_iter = 'iteritems' -else: - dict_item_iter = 'items' - -code_features = ['division'] - -# does this python version support generator stops? (PEP 0479) -try: - exec('from __future__ import generator_stop') - code_features.append('generator_stop') -except SyntaxError: - pass - -# does this python version support yield from? -try: - exec('def f(): yield from x()') -except SyntaxError: - supports_yield_from = False -else: - supports_yield_from = True - - -def optimizeconst(f): - def new_func(self, node, frame, **kwargs): - # Only optimize if the frame is not volatile - if self.optimized and not frame.eval_ctx.volatile: - new_node = self.optimizer.visit(node, frame.eval_ctx) - if new_node != node: - return self.visit(new_node, frame) - return f(self, node, frame, **kwargs) - return update_wrapper(new_func, f) - - -def generate(node, environment, name, filename, stream=None, - defer_init=False, optimized=True): - """Generate the python source for a node tree.""" - if not isinstance(node, nodes.Template): - raise TypeError('Can\'t compile non template nodes') - generator = environment.code_generator_class(environment, name, filename, - stream, defer_init, - optimized) - generator.visit(node) - if stream is None: - return generator.stream.getvalue() - - -def has_safe_repr(value): - """Does the node have a safe representation?""" - if value is None or value is NotImplemented or value is Ellipsis: - return True - if type(value) in (bool, int, float, complex, range_type, Markup) + string_types: - return True - if type(value) in (tuple, list, set, frozenset): - for item in value: - if not has_safe_repr(item): - return False - return True - elif type(value) is dict: - for key, value in iteritems(value): - if not has_safe_repr(key): - return False - if not has_safe_repr(value): - return False - return True - return False - - -def find_undeclared(nodes, names): - """Check if the names passed are accessed undeclared. The return value - is a set of all the undeclared names from the sequence of names found. - """ - visitor = UndeclaredNameVisitor(names) - try: - for node in nodes: - visitor.visit(node) - except VisitorExit: - pass - return visitor.undeclared - - -class MacroRef(object): - - def __init__(self, node): - self.node = node - self.accesses_caller = False - self.accesses_kwargs = False - self.accesses_varargs = False - - -class Frame(object): - """Holds compile time information for us.""" - - def __init__(self, eval_ctx, parent=None, level=None): - self.eval_ctx = eval_ctx - self.symbols = Symbols(parent and parent.symbols or None, - level=level) - - # a toplevel frame is the root + soft frames such as if conditions. - self.toplevel = False - - # the root frame is basically just the outermost frame, so no if - # conditions. This information is used to optimize inheritance - # situations. - self.rootlevel = False - - # in some dynamic inheritance situations the compiler needs to add - # write tests around output statements. - self.require_output_check = parent and parent.require_output_check - - # inside some tags we are using a buffer rather than yield statements. - # this for example affects {% filter %} or {% macro %}. If a frame - # is buffered this variable points to the name of the list used as - # buffer. - self.buffer = None - - # the name of the block we're in, otherwise None. 
- self.block = parent and parent.block or None - - # the parent of this frame - self.parent = parent - - if parent is not None: - self.buffer = parent.buffer - - def copy(self): - """Create a copy of the current one.""" - rv = object.__new__(self.__class__) - rv.__dict__.update(self.__dict__) - rv.symbols = self.symbols.copy() - return rv - - def inner(self, isolated=False): - """Return an inner frame.""" - if isolated: - return Frame(self.eval_ctx, level=self.symbols.level + 1) - return Frame(self.eval_ctx, self) - - def soft(self): - """Return a soft frame. A soft frame may not be modified as - standalone thing as it shares the resources with the frame it - was created of, but it's not a rootlevel frame any longer. - - This is only used to implement if-statements. - """ - rv = self.copy() - rv.rootlevel = False - return rv - - __copy__ = copy - - -class VisitorExit(RuntimeError): - """Exception used by the `UndeclaredNameVisitor` to signal a stop.""" - - -class DependencyFinderVisitor(NodeVisitor): - """A visitor that collects filter and test calls.""" - - def __init__(self): - self.filters = set() - self.tests = set() - - def visit_Filter(self, node): - self.generic_visit(node) - self.filters.add(node.name) - - def visit_Test(self, node): - self.generic_visit(node) - self.tests.add(node.name) - - def visit_Block(self, node): - """Stop visiting at blocks.""" - - -class UndeclaredNameVisitor(NodeVisitor): - """A visitor that checks if a name is accessed without being - declared. This is different from the frame visitor as it will - not stop at closure frames. - """ - - def __init__(self, names): - self.names = set(names) - self.undeclared = set() - - def visit_Name(self, node): - if node.ctx == 'load' and node.name in self.names: - self.undeclared.add(node.name) - if self.undeclared == self.names: - raise VisitorExit() - else: - self.names.discard(node.name) - - def visit_Block(self, node): - """Stop visiting a blocks.""" - - -class CompilerExit(Exception): - """Raised if the compiler encountered a situation where it just - doesn't make sense to further process the code. Any block that - raises such an exception is not further processed. - """ - - -class CodeGenerator(NodeVisitor): - - def __init__(self, environment, name, filename, stream=None, - defer_init=False, optimized=True): - if stream is None: - stream = NativeStringIO() - self.environment = environment - self.name = name - self.filename = filename - self.stream = stream - self.created_block_context = False - self.defer_init = defer_init - self.optimized = optimized - if optimized: - self.optimizer = Optimizer(environment) - - # aliases for imports - self.import_aliases = {} - - # a registry for all blocks. Because blocks are moved out - # into the global python scope they are registered here - self.blocks = {} - - # the number of extends statements so far - self.extends_so_far = 0 - - # some templates have a rootlevel extends. In this case we - # can safely assume that we're a child template and do some - # more optimizations. - self.has_known_extends = False - - # the current line number - self.code_lineno = 1 - - # registry of all filters and tests (global, not block local) - self.tests = {} - self.filters = {} - - # the debug information - self.debug_info = [] - self._write_debug_info = None - - # the number of new lines before the next write() - self._new_lines = 0 - - # the line number of the last written statement - self._last_line = 0 - - # true if nothing was written so far. 
- self._first_write = True - - # used by the `temporary_identifier` method to get new - # unique, temporary identifier - self._last_identifier = 0 - - # the current indentation - self._indentation = 0 - - # Tracks toplevel assignments - self._assign_stack = [] - - # Tracks parameter definition blocks - self._param_def_block = [] - - # Tracks the current context. - self._context_reference_stack = ['context'] - - # -- Various compilation helpers - - def fail(self, msg, lineno): - """Fail with a :exc:`TemplateAssertionError`.""" - raise TemplateAssertionError(msg, lineno, self.name, self.filename) - - def temporary_identifier(self): - """Get a new unique identifier.""" - self._last_identifier += 1 - return 't_%d' % self._last_identifier - - def buffer(self, frame): - """Enable buffering for the frame from that point onwards.""" - frame.buffer = self.temporary_identifier() - self.writeline('%s = []' % frame.buffer) - - def return_buffer_contents(self, frame, force_unescaped=False): - """Return the buffer contents of the frame.""" - if not force_unescaped: - if frame.eval_ctx.volatile: - self.writeline('if context.eval_ctx.autoescape:') - self.indent() - self.writeline('return Markup(concat(%s))' % frame.buffer) - self.outdent() - self.writeline('else:') - self.indent() - self.writeline('return concat(%s)' % frame.buffer) - self.outdent() - return - elif frame.eval_ctx.autoescape: - self.writeline('return Markup(concat(%s))' % frame.buffer) - return - self.writeline('return concat(%s)' % frame.buffer) - - def indent(self): - """Indent by one.""" - self._indentation += 1 - - def outdent(self, step=1): - """Outdent by step.""" - self._indentation -= step - - def start_write(self, frame, node=None): - """Yield or write into the frame buffer.""" - if frame.buffer is None: - self.writeline('yield ', node) - else: - self.writeline('%s.append(' % frame.buffer, node) - - def end_write(self, frame): - """End the writing process started by `start_write`.""" - if frame.buffer is not None: - self.write(')') - - def simple_write(self, s, frame, node=None): - """Simple shortcut for start_write + write + end_write.""" - self.start_write(frame, node) - self.write(s) - self.end_write(frame) - - def blockvisit(self, nodes, frame): - """Visit a list of nodes as block in a frame. If the current frame - is no buffer a dummy ``if 0: yield None`` is written automatically. - """ - try: - self.writeline('pass') - for node in nodes: - self.visit(node, frame) - except CompilerExit: - pass - - def write(self, x): - """Write a string into the output stream.""" - if self._new_lines: - if not self._first_write: - self.stream.write('\n' * self._new_lines) - self.code_lineno += self._new_lines - if self._write_debug_info is not None: - self.debug_info.append((self._write_debug_info, - self.code_lineno)) - self._write_debug_info = None - self._first_write = False - self.stream.write(' ' * self._indentation) - self._new_lines = 0 - self.stream.write(x) - - def writeline(self, x, node=None, extra=0): - """Combination of newline and write.""" - self.newline(node, extra) - self.write(x) - - def newline(self, node=None, extra=0): - """Add one or more newlines before the next write.""" - self._new_lines = max(self._new_lines, 1 + extra) - if node is not None and node.lineno != self._last_line: - self._write_debug_info = node.lineno - self._last_line = node.lineno - - def signature(self, node, frame, extra_kwargs=None): - """Writes a function call to the stream for the current node. - A leading comma is added automatically. 
The extra keyword - arguments may not include python keywords otherwise a syntax - error could occour. The extra keyword arguments should be given - as python dict. - """ - # if any of the given keyword arguments is a python keyword - # we have to make sure that no invalid call is created. - kwarg_workaround = False - for kwarg in chain((x.key for x in node.kwargs), extra_kwargs or ()): - if is_python_keyword(kwarg): - kwarg_workaround = True - break - - for arg in node.args: - self.write(', ') - self.visit(arg, frame) - - if not kwarg_workaround: - for kwarg in node.kwargs: - self.write(', ') - self.visit(kwarg, frame) - if extra_kwargs is not None: - for key, value in iteritems(extra_kwargs): - self.write(', %s=%s' % (key, value)) - if node.dyn_args: - self.write(', *') - self.visit(node.dyn_args, frame) - - if kwarg_workaround: - if node.dyn_kwargs is not None: - self.write(', **dict({') - else: - self.write(', **{') - for kwarg in node.kwargs: - self.write('%r: ' % kwarg.key) - self.visit(kwarg.value, frame) - self.write(', ') - if extra_kwargs is not None: - for key, value in iteritems(extra_kwargs): - self.write('%r: %s, ' % (key, value)) - if node.dyn_kwargs is not None: - self.write('}, **') - self.visit(node.dyn_kwargs, frame) - self.write(')') - else: - self.write('}') - - elif node.dyn_kwargs is not None: - self.write(', **') - self.visit(node.dyn_kwargs, frame) - - def pull_dependencies(self, nodes): - """Pull all the dependencies.""" - visitor = DependencyFinderVisitor() - for node in nodes: - visitor.visit(node) - for dependency in 'filters', 'tests': - mapping = getattr(self, dependency) - for name in getattr(visitor, dependency): - if name not in mapping: - mapping[name] = self.temporary_identifier() - self.writeline('%s = environment.%s[%r]' % - (mapping[name], dependency, name)) - - def enter_frame(self, frame): - undefs = [] - for target, (action, param) in iteritems(frame.symbols.loads): - if action == VAR_LOAD_PARAMETER: - pass - elif action == VAR_LOAD_RESOLVE: - self.writeline('%s = %s(%r)' % - (target, self.get_resolve_func(), param)) - elif action == VAR_LOAD_ALIAS: - self.writeline('%s = %s' % (target, param)) - elif action == VAR_LOAD_UNDEFINED: - undefs.append(target) - else: - raise NotImplementedError('unknown load instruction') - if undefs: - self.writeline('%s = missing' % ' = '.join(undefs)) - - def leave_frame(self, frame, with_python_scope=False): - if not with_python_scope: - undefs = [] - for target, _ in iteritems(frame.symbols.loads): - undefs.append(target) - if undefs: - self.writeline('%s = missing' % ' = '.join(undefs)) - - def func(self, name): - if self.environment.is_async: - return 'async def %s' % name - return 'def %s' % name - - def macro_body(self, node, frame): - """Dump the function def of a macro or call block.""" - frame = frame.inner() - frame.symbols.analyze_node(node) - macro_ref = MacroRef(node) - - explicit_caller = None - skip_special_params = set() - args = [] - for idx, arg in enumerate(node.args): - if arg.name == 'caller': - explicit_caller = idx - if arg.name in ('kwargs', 'varargs'): - skip_special_params.add(arg.name) - args.append(frame.symbols.ref(arg.name)) - - undeclared = find_undeclared(node.body, ('caller', 'kwargs', 'varargs')) - - if 'caller' in undeclared: - # In older Jinja2 versions there was a bug that allowed caller - # to retain the special behavior even if it was mentioned in - # the argument list. However thankfully this was only really - # working if it was the last argument. 
So we are explicitly - # checking this now and error out if it is anywhere else in - # the argument list. - if explicit_caller is not None: - try: - node.defaults[explicit_caller - len(node.args)] - except IndexError: - self.fail('When defining macros or call blocks the ' - 'special "caller" argument must be omitted ' - 'or be given a default.', node.lineno) - else: - args.append(frame.symbols.declare_parameter('caller')) - macro_ref.accesses_caller = True - if 'kwargs' in undeclared and not 'kwargs' in skip_special_params: - args.append(frame.symbols.declare_parameter('kwargs')) - macro_ref.accesses_kwargs = True - if 'varargs' in undeclared and not 'varargs' in skip_special_params: - args.append(frame.symbols.declare_parameter('varargs')) - macro_ref.accesses_varargs = True - - # macros are delayed, they never require output checks - frame.require_output_check = False - frame.symbols.analyze_node(node) - self.writeline('%s(%s):' % (self.func('macro'), ', '.join(args)), node) - self.indent() - - self.buffer(frame) - self.enter_frame(frame) - - self.push_parameter_definitions(frame) - for idx, arg in enumerate(node.args): - ref = frame.symbols.ref(arg.name) - self.writeline('if %s is missing:' % ref) - self.indent() - try: - default = node.defaults[idx - len(node.args)] - except IndexError: - self.writeline('%s = undefined(%r, name=%r)' % ( - ref, - 'parameter %r was not provided' % arg.name, - arg.name)) - else: - self.writeline('%s = ' % ref) - self.visit(default, frame) - self.mark_parameter_stored(ref) - self.outdent() - self.pop_parameter_definitions() - - self.blockvisit(node.body, frame) - self.return_buffer_contents(frame, force_unescaped=True) - self.leave_frame(frame, with_python_scope=True) - self.outdent() - - return frame, macro_ref - - def macro_def(self, macro_ref, frame): - """Dump the macro definition for the def created by macro_body.""" - arg_tuple = ', '.join(repr(x.name) for x in macro_ref.node.args) - name = getattr(macro_ref.node, 'name', None) - if len(macro_ref.node.args) == 1: - arg_tuple += ',' - self.write('Macro(environment, macro, %r, (%s), %r, %r, %r, ' - 'context.eval_ctx.autoescape)' % - (name, arg_tuple, macro_ref.accesses_kwargs, - macro_ref.accesses_varargs, macro_ref.accesses_caller)) - - def position(self, node): - """Return a human readable position for the node.""" - rv = 'line %d' % node.lineno - if self.name is not None: - rv += ' in ' + repr(self.name) - return rv - - def dump_local_context(self, frame): - return '{%s}' % ', '.join( - '%r: %s' % (name, target) for name, target - in iteritems(frame.symbols.dump_stores())) - - def write_commons(self): - """Writes a common preamble that is used by root and block functions. - Primarily this sets up common local helpers and enforces a generator - through a dead branch. - """ - self.writeline('resolve = context.resolve_or_missing') - self.writeline('undefined = environment.undefined') - self.writeline('if 0: yield None') - - def push_parameter_definitions(self, frame): - """Pushes all parameter targets from the given frame into a local - stack that permits tracking of yet to be assigned parameters. In - particular this enables the optimization from `visit_Name` to skip - undefined expressions for parameters in macros as macros can reference - otherwise unbound parameters. 
- """ - self._param_def_block.append(frame.symbols.dump_param_targets()) - - def pop_parameter_definitions(self): - """Pops the current parameter definitions set.""" - self._param_def_block.pop() - - def mark_parameter_stored(self, target): - """Marks a parameter in the current parameter definitions as stored. - This will skip the enforced undefined checks. - """ - if self._param_def_block: - self._param_def_block[-1].discard(target) - - def push_context_reference(self, target): - self._context_reference_stack.append(target) - - def pop_context_reference(self): - self._context_reference_stack.pop() - - def get_context_ref(self): - return self._context_reference_stack[-1] - - def get_resolve_func(self): - target = self._context_reference_stack[-1] - if target == 'context': - return 'resolve' - return '%s.resolve' % target - - def derive_context(self, frame): - return '%s.derived(%s)' % ( - self.get_context_ref(), - self.dump_local_context(frame), - ) - - def parameter_is_undeclared(self, target): - """Checks if a given target is an undeclared parameter.""" - if not self._param_def_block: - return False - return target in self._param_def_block[-1] - - def push_assign_tracking(self): - """Pushes a new layer for assignment tracking.""" - self._assign_stack.append(set()) - - def pop_assign_tracking(self, frame): - """Pops the topmost level for assignment tracking and updates the - context variables if necessary. - """ - vars = self._assign_stack.pop() - if not frame.toplevel or not vars: - return - public_names = [x for x in vars if x[:1] != '_'] - if len(vars) == 1: - name = next(iter(vars)) - ref = frame.symbols.ref(name) - self.writeline('context.vars[%r] = %s' % (name, ref)) - else: - self.writeline('context.vars.update({') - for idx, name in enumerate(vars): - if idx: - self.write(', ') - ref = frame.symbols.ref(name) - self.write('%r: %s' % (name, ref)) - self.write('})') - if public_names: - if len(public_names) == 1: - self.writeline('context.exported_vars.add(%r)' % - public_names[0]) - else: - self.writeline('context.exported_vars.update((%s))' % - ', '.join(imap(repr, public_names))) - - # -- Statement Visitors - - def visit_Template(self, node, frame=None): - assert frame is None, 'no root frame allowed' - eval_ctx = EvalContext(self.environment, self.name) - - from jinja2.runtime import __all__ as exported - self.writeline('from __future__ import %s' % ', '.join(code_features)) - self.writeline('from jinja2.runtime import ' + ', '.join(exported)) - - if self.environment.is_async: - self.writeline('from jinja2.asyncsupport import auto_await, ' - 'auto_aiter, make_async_loop_context') - - # if we want a deferred initialization we cannot move the - # environment into a local name - envenv = not self.defer_init and ', environment=environment' or '' - - # do we have an extends tag at all? If not, we can save some - # overhead by just not processing any inheritance code. - have_extends = node.find(nodes.Extends) is not None - - # find all blocks - for block in node.find_all(nodes.Block): - if block.name in self.blocks: - self.fail('block %r defined twice' % block.name, block.lineno) - self.blocks[block.name] = block - - # find all imports and import them - for import_ in node.find_all(nodes.ImportedName): - if import_.importname not in self.import_aliases: - imp = import_.importname - self.import_aliases[imp] = alias = self.temporary_identifier() - if '.' 
in imp: - module, obj = imp.rsplit('.', 1) - self.writeline('from %s import %s as %s' % - (module, obj, alias)) - else: - self.writeline('import %s as %s' % (imp, alias)) - - # add the load name - self.writeline('name = %r' % self.name) - - # generate the root render function. - self.writeline('%s(context, missing=missing%s):' % - (self.func('root'), envenv), extra=1) - self.indent() - self.write_commons() - - # process the root - frame = Frame(eval_ctx) - if 'self' in find_undeclared(node.body, ('self',)): - ref = frame.symbols.declare_parameter('self') - self.writeline('%s = TemplateReference(context)' % ref) - frame.symbols.analyze_node(node) - frame.toplevel = frame.rootlevel = True - frame.require_output_check = have_extends and not self.has_known_extends - if have_extends: - self.writeline('parent_template = None') - self.enter_frame(frame) - self.pull_dependencies(node.body) - self.blockvisit(node.body, frame) - self.leave_frame(frame, with_python_scope=True) - self.outdent() - - # make sure that the parent root is called. - if have_extends: - if not self.has_known_extends: - self.indent() - self.writeline('if parent_template is not None:') - self.indent() - if supports_yield_from and not self.environment.is_async: - self.writeline('yield from parent_template.' - 'root_render_func(context)') - else: - self.writeline('%sfor event in parent_template.' - 'root_render_func(context):' % - (self.environment.is_async and 'async ' or '')) - self.indent() - self.writeline('yield event') - self.outdent() - self.outdent(1 + (not self.has_known_extends)) - - # at this point we now have the blocks collected and can visit them too. - for name, block in iteritems(self.blocks): - self.writeline('%s(context, missing=missing%s):' % - (self.func('block_' + name), envenv), - block, 1) - self.indent() - self.write_commons() - # It's important that we do not make this frame a child of the - # toplevel template. This would cause a variety of - # interesting issues with identifier tracking. 
- block_frame = Frame(eval_ctx) - undeclared = find_undeclared(block.body, ('self', 'super')) - if 'self' in undeclared: - ref = block_frame.symbols.declare_parameter('self') - self.writeline('%s = TemplateReference(context)' % ref) - if 'super' in undeclared: - ref = block_frame.symbols.declare_parameter('super') - self.writeline('%s = context.super(%r, ' - 'block_%s)' % (ref, name, name)) - block_frame.symbols.analyze_node(block) - block_frame.block = name - self.enter_frame(block_frame) - self.pull_dependencies(block.body) - self.blockvisit(block.body, block_frame) - self.leave_frame(block_frame, with_python_scope=True) - self.outdent() - - self.writeline('blocks = {%s}' % ', '.join('%r: block_%s' % (x, x) - for x in self.blocks), - extra=1) - - # add a function that returns the debug info - self.writeline('debug_info = %r' % '&'.join('%s=%s' % x for x - in self.debug_info)) - - def visit_Block(self, node, frame): - """Call a block and register it for the template.""" - level = 0 - if frame.toplevel: - # if we know that we are a child template, there is no need to - # check if we are one - if self.has_known_extends: - return - if self.extends_so_far > 0: - self.writeline('if parent_template is None:') - self.indent() - level += 1 - - if node.scoped: - context = self.derive_context(frame) - else: - context = self.get_context_ref() - - if supports_yield_from and not self.environment.is_async and \ - frame.buffer is None: - self.writeline('yield from context.blocks[%r][0](%s)' % ( - node.name, context), node) - else: - loop = self.environment.is_async and 'async for' or 'for' - self.writeline('%s event in context.blocks[%r][0](%s):' % ( - loop, node.name, context), node) - self.indent() - self.simple_write('event', frame) - self.outdent() - - self.outdent(level) - - def visit_Extends(self, node, frame): - """Calls the extender.""" - if not frame.toplevel: - self.fail('cannot use extend from a non top-level scope', - node.lineno) - - # if the number of extends statements in general is zero so - # far, we don't have to add a check if something extended - # the template before this one. - if self.extends_so_far > 0: - - # if we have a known extends we just add a template runtime - # error into the generated code. We could catch that at compile - # time too, but i welcome it not to confuse users by throwing the - # same error at different times just "because we can". - if not self.has_known_extends: - self.writeline('if parent_template is not None:') - self.indent() - self.writeline('raise TemplateRuntimeError(%r)' % - 'extended multiple times') - - # if we have a known extends already we don't need that code here - # as we know that the template execution will end here. - if self.has_known_extends: - raise CompilerExit() - else: - self.outdent() - - self.writeline('parent_template = environment.get_template(', node) - self.visit(node.template, frame) - self.write(', %r)' % self.name) - self.writeline('for name, parent_block in parent_template.' - 'blocks.%s():' % dict_item_iter) - self.indent() - self.writeline('context.blocks.setdefault(name, []).' 
- 'append(parent_block)') - self.outdent() - - # if this extends statement was in the root level we can take - # advantage of that information and simplify the generated code - # in the top level from this point onwards - if frame.rootlevel: - self.has_known_extends = True - - # and now we have one more - self.extends_so_far += 1 - - def visit_Include(self, node, frame): - """Handles includes.""" - if node.ignore_missing: - self.writeline('try:') - self.indent() - - func_name = 'get_or_select_template' - if isinstance(node.template, nodes.Const): - if isinstance(node.template.value, string_types): - func_name = 'get_template' - elif isinstance(node.template.value, (tuple, list)): - func_name = 'select_template' - elif isinstance(node.template, (nodes.Tuple, nodes.List)): - func_name = 'select_template' - - self.writeline('template = environment.%s(' % func_name, node) - self.visit(node.template, frame) - self.write(', %r)' % self.name) - if node.ignore_missing: - self.outdent() - self.writeline('except TemplateNotFound:') - self.indent() - self.writeline('pass') - self.outdent() - self.writeline('else:') - self.indent() - - skip_event_yield = False - if node.with_context: - loop = self.environment.is_async and 'async for' or 'for' - self.writeline('%s event in template.root_render_func(' - 'template.new_context(context.get_all(), True, ' - '%s)):' % (loop, self.dump_local_context(frame))) - elif self.environment.is_async: - self.writeline('for event in (await ' - 'template._get_default_module_async())' - '._body_stream:') - else: - if supports_yield_from: - self.writeline('yield from template._get_default_module()' - '._body_stream') - skip_event_yield = True - else: - self.writeline('for event in template._get_default_module()' - '._body_stream:') - - if not skip_event_yield: - self.indent() - self.simple_write('event', frame) - self.outdent() - - if node.ignore_missing: - self.outdent() - - def visit_Import(self, node, frame): - """Visit regular imports.""" - self.writeline('%s = ' % frame.symbols.ref(node.target), node) - if frame.toplevel: - self.write('context.vars[%r] = ' % node.target) - if self.environment.is_async: - self.write('await ') - self.write('environment.get_template(') - self.visit(node.template, frame) - self.write(', %r).' % self.name) - if node.with_context: - self.write('make_module%s(context.get_all(), True, %s)' - % (self.environment.is_async and '_async' or '', - self.dump_local_context(frame))) - elif self.environment.is_async: - self.write('_get_default_module_async()') - else: - self.write('_get_default_module()') - if frame.toplevel and not node.target.startswith('_'): - self.writeline('context.exported_vars.discard(%r)' % node.target) - - def visit_FromImport(self, node, frame): - """Visit named imports.""" - self.newline(node) - self.write('included_template = %senvironment.get_template(' - % (self.environment.is_async and 'await ' or '')) - self.visit(node.template, frame) - self.write(', %r).' 
% self.name) - if node.with_context: - self.write('make_module%s(context.get_all(), True, %s)' - % (self.environment.is_async and '_async' or '', - self.dump_local_context(frame))) - elif self.environment.is_async: - self.write('_get_default_module_async()') - else: - self.write('_get_default_module()') - - var_names = [] - discarded_names = [] - for name in node.names: - if isinstance(name, tuple): - name, alias = name - else: - alias = name - self.writeline('%s = getattr(included_template, ' - '%r, missing)' % (frame.symbols.ref(alias), name)) - self.writeline('if %s is missing:' % frame.symbols.ref(alias)) - self.indent() - self.writeline('%s = undefined(%r %% ' - 'included_template.__name__, ' - 'name=%r)' % - (frame.symbols.ref(alias), - 'the template %%r (imported on %s) does ' - 'not export the requested name %s' % ( - self.position(node), - repr(name) - ), name)) - self.outdent() - if frame.toplevel: - var_names.append(alias) - if not alias.startswith('_'): - discarded_names.append(alias) - - if var_names: - if len(var_names) == 1: - name = var_names[0] - self.writeline('context.vars[%r] = %s' % - (name, frame.symbols.ref(name))) - else: - self.writeline('context.vars.update({%s})' % ', '.join( - '%r: %s' % (name, frame.symbols.ref(name)) for name in var_names - )) - if discarded_names: - if len(discarded_names) == 1: - self.writeline('context.exported_vars.discard(%r)' % - discarded_names[0]) - else: - self.writeline('context.exported_vars.difference_' - 'update((%s))' % ', '.join(imap(repr, discarded_names))) - - def visit_For(self, node, frame): - loop_frame = frame.inner() - test_frame = frame.inner() - else_frame = frame.inner() - - # try to figure out if we have an extended loop. An extended loop - # is necessary if the loop is in recursive mode if the special loop - # variable is accessed in the body. - extended_loop = node.recursive or 'loop' in \ - find_undeclared(node.iter_child_nodes( - only=('body',)), ('loop',)) - - loop_ref = None - if extended_loop: - loop_ref = loop_frame.symbols.declare_parameter('loop') - - loop_frame.symbols.analyze_node(node, for_branch='body') - if node.else_: - else_frame.symbols.analyze_node(node, for_branch='else') - - if node.test: - loop_filter_func = self.temporary_identifier() - test_frame.symbols.analyze_node(node, for_branch='test') - self.writeline('%s(fiter):' % self.func(loop_filter_func), node.test) - self.indent() - self.enter_frame(test_frame) - self.writeline(self.environment.is_async and 'async for ' or 'for ') - self.visit(node.target, loop_frame) - self.write(' in ') - self.write(self.environment.is_async and 'auto_aiter(fiter)' or 'fiter') - self.write(':') - self.indent() - self.writeline('if ', node.test) - self.visit(node.test, test_frame) - self.write(':') - self.indent() - self.writeline('yield ') - self.visit(node.target, loop_frame) - self.outdent(3) - self.leave_frame(test_frame, with_python_scope=True) - - # if we don't have an recursive loop we have to find the shadowed - # variables at that point. Because loops can be nested but the loop - # variable is a special one we have to enforce aliasing for it. 
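The include and import visitors above compile down to plain environment.get_template / select_template calls, with a try/except TemplateNotFound wrapper for ``ignore missing``. A minimal sketch; the template names are illustrative::

    from jinja2 import Environment, DictLoader

    env = Environment(loader=DictLoader({
        'forms.html': '{% macro input(name) %}<input name="{{ name }}">{% endmacro %}',
        'page.html': "{% include 'sidebar.html' ignore missing %}"
                     "{% from 'forms.html' import input %}{{ input('q') }}",
    }))

    # 'sidebar.html' does not exist, so the generated except-TemplateNotFound
    # branch skips it silently; the macro import becomes a getattr() on the
    # included template module with an undefined fallback.
    print(env.get_template('page.html').render())  # -> '<input name="q">'
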
- if node.recursive: - self.writeline('%s(reciter, loop_render_func, depth=0):' % - self.func('loop'), node) - self.indent() - self.buffer(loop_frame) - - # Use the same buffer for the else frame - else_frame.buffer = loop_frame.buffer - - # make sure the loop variable is a special one and raise a template - # assertion error if a loop tries to write to loop - if extended_loop: - self.writeline('%s = missing' % loop_ref) - - for name in node.find_all(nodes.Name): - if name.ctx == 'store' and name.name == 'loop': - self.fail('Can\'t assign to special loop variable ' - 'in for-loop target', name.lineno) - - if node.else_: - iteration_indicator = self.temporary_identifier() - self.writeline('%s = 1' % iteration_indicator) - - self.writeline(self.environment.is_async and 'async for ' or 'for ', node) - self.visit(node.target, loop_frame) - if extended_loop: - if self.environment.is_async: - self.write(', %s in await make_async_loop_context(' % loop_ref) - else: - self.write(', %s in LoopContext(' % loop_ref) - else: - self.write(' in ') - - if node.test: - self.write('%s(' % loop_filter_func) - if node.recursive: - self.write('reciter') - else: - if self.environment.is_async and not extended_loop: - self.write('auto_aiter(') - self.visit(node.iter, frame) - if self.environment.is_async and not extended_loop: - self.write(')') - if node.test: - self.write(')') - - if node.recursive: - self.write(', undefined, loop_render_func, depth):') - else: - self.write(extended_loop and ', undefined):' or ':') - - self.indent() - self.enter_frame(loop_frame) - - self.blockvisit(node.body, loop_frame) - if node.else_: - self.writeline('%s = 0' % iteration_indicator) - self.outdent() - self.leave_frame(loop_frame, with_python_scope=node.recursive - and not node.else_) - - if node.else_: - self.writeline('if %s:' % iteration_indicator) - self.indent() - self.enter_frame(else_frame) - self.blockvisit(node.else_, else_frame) - self.leave_frame(else_frame) - self.outdent() - - # if the node was recursive we have to return the buffer contents - # and start the iteration code - if node.recursive: - self.return_buffer_contents(loop_frame) - self.outdent() - self.start_write(frame, node) - if self.environment.is_async: - self.write('await ') - self.write('loop(') - if self.environment.is_async: - self.write('auto_aiter(') - self.visit(node.iter, frame) - if self.environment.is_async: - self.write(')') - self.write(', loop)') - self.end_write(frame) - - def visit_If(self, node, frame): - if_frame = frame.soft() - self.writeline('if ', node) - self.visit(node.test, if_frame) - self.write(':') - self.indent() - self.blockvisit(node.body, if_frame) - self.outdent() - for elif_ in node.elif_: - self.writeline('elif ', elif_) - self.visit(elif_.test, if_frame) - self.write(':') - self.indent() - self.blockvisit(elif_.body, if_frame) - self.outdent() - if node.else_: - self.writeline('else:') - self.indent() - self.blockvisit(node.else_, if_frame) - self.outdent() - - def visit_Macro(self, node, frame): - macro_frame, macro_ref = self.macro_body(node, frame) - self.newline() - if frame.toplevel: - if not node.name.startswith('_'): - self.write('context.exported_vars.add(%r)' % node.name) - ref = frame.symbols.ref(node.name) - self.writeline('context.vars[%r] = ' % node.name) - self.write('%s = ' % frame.symbols.ref(node.name)) - self.macro_def(macro_ref, macro_frame) - - def visit_CallBlock(self, node, frame): - call_frame, macro_ref = self.macro_body(node, frame) - self.writeline('caller = ') - self.macro_def(macro_ref, 
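visit_For above is easier to follow next to the template constructs it compiles: an inline loop filter becomes the generated loop_filter_func generator, and the ``else`` branch keys off the iteration-indicator flag. A minimal sketch with illustrative data::

    from jinja2 import Template

    t = Template(
        '{% for x in items if x > 1 %}{{ loop.index }}:{{ x }} '
        '{% else %}empty{% endfor %}'
    )

    # loop.index counts only the items that pass the inline filter.
    print(t.render(items=[1, 2, 3]))  # -> "1:2 2:3 "
    # No item passes the filter, so the else branch runs.
    print(t.render(items=[0, 1]))     # -> "empty"
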
call_frame) - self.start_write(frame, node) - self.visit_Call(node.call, frame, forward_caller=True) - self.end_write(frame) - - def visit_FilterBlock(self, node, frame): - filter_frame = frame.inner() - filter_frame.symbols.analyze_node(node) - self.enter_frame(filter_frame) - self.buffer(filter_frame) - self.blockvisit(node.body, filter_frame) - self.start_write(frame, node) - self.visit_Filter(node.filter, filter_frame) - self.end_write(frame) - self.leave_frame(filter_frame) - - def visit_With(self, node, frame): - with_frame = frame.inner() - with_frame.symbols.analyze_node(node) - self.enter_frame(with_frame) - for idx, (target, expr) in enumerate(izip(node.targets, node.values)): - self.newline() - self.visit(target, with_frame) - self.write(' = ') - self.visit(expr, frame) - self.blockvisit(node.body, with_frame) - self.leave_frame(with_frame) - - def visit_ExprStmt(self, node, frame): - self.newline(node) - self.visit(node.node, frame) - - def visit_Output(self, node, frame): - # if we have a known extends statement, we don't output anything - # if we are in a require_output_check section - if self.has_known_extends and frame.require_output_check: - return - - allow_constant_finalize = True - if self.environment.finalize: - func = self.environment.finalize - if getattr(func, 'contextfunction', False) or \ - getattr(func, 'evalcontextfunction', False): - allow_constant_finalize = False - elif getattr(func, 'environmentfunction', False): - finalize = lambda x: text_type( - self.environment.finalize(self.environment, x)) - else: - finalize = lambda x: text_type(self.environment.finalize(x)) - else: - finalize = text_type - - # if we are inside a frame that requires output checking, we do so - outdent_later = False - if frame.require_output_check: - self.writeline('if parent_template is None:') - self.indent() - outdent_later = True - - # try to evaluate as many chunks as possible into a static - # string at compile time. - body = [] - for child in node.nodes: - try: - if not allow_constant_finalize: - raise nodes.Impossible() - const = child.as_const(frame.eval_ctx) - except nodes.Impossible: - body.append(child) - continue - # the frame can't be volatile here, becaus otherwise the - # as_const() function would raise an Impossible exception - # at that point. 
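visit_CallBlock, visit_FilterBlock and visit_With above correspond to the {% call %}, {% filter %} and {% with %} tags. A short sketch of all three together; the macro name and body text are illustrative::

    from jinja2 import Template

    t = Template(
        "{% macro dialog(title) %}{{ title }}: {{ caller() }}{% endmacro %}"
        "{% call dialog('Hi') %}body{% endcall %} "
        "{% filter upper %}{% with name = 'jinja' %}{{ name }}{% endwith %}{% endfilter %}"
    )

    # The call block is compiled into a synthesized 'caller' macro that
    # visit_Call receives via forward_caller=True; the filter block buffers
    # its body and passes the buffer through visit_Filter.
    print(t.render())  # -> "Hi: body JINJA"
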
- try: - if frame.eval_ctx.autoescape: - if hasattr(const, '__html__'): - const = const.__html__() - else: - const = escape(const) - const = finalize(const) - except Exception: - # if something goes wrong here we evaluate the node - # at runtime for easier debugging - body.append(child) - continue - if body and isinstance(body[-1], list): - body[-1].append(const) - else: - body.append([const]) - - # if we have less than 3 nodes or a buffer we yield or extend/append - if len(body) < 3 or frame.buffer is not None: - if frame.buffer is not None: - # for one item we append, for more we extend - if len(body) == 1: - self.writeline('%s.append(' % frame.buffer) - else: - self.writeline('%s.extend((' % frame.buffer) - self.indent() - for item in body: - if isinstance(item, list): - val = repr(concat(item)) - if frame.buffer is None: - self.writeline('yield ' + val) - else: - self.writeline(val + ',') - else: - if frame.buffer is None: - self.writeline('yield ', item) - else: - self.newline(item) - close = 1 - if frame.eval_ctx.volatile: - self.write('(escape if context.eval_ctx.autoescape' - ' else to_string)(') - elif frame.eval_ctx.autoescape: - self.write('escape(') - else: - self.write('to_string(') - if self.environment.finalize is not None: - self.write('environment.finalize(') - if getattr(self.environment.finalize, - "contextfunction", False): - self.write('context, ') - close += 1 - self.visit(item, frame) - self.write(')' * close) - if frame.buffer is not None: - self.write(',') - if frame.buffer is not None: - # close the open parentheses - self.outdent() - self.writeline(len(body) == 1 and ')' or '))') - - # otherwise we create a format string as this is faster in that case - else: - format = [] - arguments = [] - for item in body: - if isinstance(item, list): - format.append(concat(item).replace('%', '%%')) - else: - format.append('%s') - arguments.append(item) - self.writeline('yield ') - self.write(repr(concat(format)) + ' % (') - self.indent() - for argument in arguments: - self.newline(argument) - close = 0 - if frame.eval_ctx.volatile: - self.write('(escape if context.eval_ctx.autoescape else' - ' to_string)(') - close += 1 - elif frame.eval_ctx.autoescape: - self.write('escape(') - close += 1 - if self.environment.finalize is not None: - self.write('environment.finalize(') - if getattr(self.environment.finalize, - 'contextfunction', False): - self.write('context, ') - elif getattr(self.environment.finalize, - 'evalcontextfunction', False): - self.write('context.eval_ctx, ') - elif getattr(self.environment.finalize, - 'environmentfunction', False): - self.write('environment, ') - close += 1 - self.visit(argument, frame) - self.write(')' * close + ', ') - self.outdent() - self.writeline(')') - - if outdent_later: - self.outdent() - - def visit_Assign(self, node, frame): - self.push_assign_tracking() - self.newline(node) - self.visit(node.target, frame) - self.write(' = ') - self.visit(node.node, frame) - self.pop_assign_tracking(frame) - - def visit_AssignBlock(self, node, frame): - self.push_assign_tracking() - block_frame = frame.inner() - # This is a special case. Since a set block always captures we - # will disable output checks. This way one can use set blocks - # toplevel even in extended templates. 
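The constant folding above runs output chunks through ``finalize`` at compile time unless it is marked as a context or eval-context function. A minimal sketch of what a finalize hook does to output expressions; the lambda is illustrative::

    from jinja2 import Environment

    # Convert None to an empty string instead of printing the literal 'None'.
    env = Environment(finalize=lambda value: '' if value is None else value)
    print(env.from_string('a{{ none }}b').render())  # -> "ab"
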
- block_frame.require_output_check = False - block_frame.symbols.analyze_node(node) - self.enter_frame(block_frame) - self.buffer(block_frame) - self.blockvisit(node.body, block_frame) - self.newline(node) - self.visit(node.target, frame) - self.write(' = (Markup if context.eval_ctx.autoescape ' - 'else identity)(') - if node.filter is not None: - self.visit_Filter(node.filter, block_frame) - else: - self.write('concat(%s)' % block_frame.buffer) - self.write(')') - self.pop_assign_tracking(frame) - self.leave_frame(block_frame) - - # -- Expression Visitors - - def visit_Name(self, node, frame): - if node.ctx == 'store' and frame.toplevel: - if self._assign_stack: - self._assign_stack[-1].add(node.name) - ref = frame.symbols.ref(node.name) - - # If we are looking up a variable we might have to deal with the - # case where it's undefined. We can skip that case if the load - # instruction indicates a parameter which are always defined. - if node.ctx == 'load': - load = frame.symbols.find_load(ref) - if not (load is not None and load[0] == VAR_LOAD_PARAMETER and \ - not self.parameter_is_undeclared(ref)): - self.write('(undefined(name=%r) if %s is missing else %s)' % - (node.name, ref, ref)) - return - - self.write(ref) - - def visit_NSRef(self, node, frame): - # NSRefs can only be used to store values; since they use the normal - # `foo.bar` notation they will be parsed as a normal attribute access - # when used anywhere but in a `set` context - ref = frame.symbols.ref(node.name) - self.writeline('if not isinstance(%s, Namespace):' % ref) - self.indent() - self.writeline('raise TemplateRuntimeError(%r)' % - 'cannot assign attribute on non-namespace object') - self.outdent() - self.writeline('%s[%r]' % (ref, node.attr)) - - def visit_Const(self, node, frame): - val = node.as_const(frame.eval_ctx) - if isinstance(val, float): - self.write(str(val)) - else: - self.write(repr(val)) - - def visit_TemplateData(self, node, frame): - try: - self.write(repr(node.as_const(frame.eval_ctx))) - except nodes.Impossible: - self.write('(Markup if context.eval_ctx.autoescape else identity)(%r)' - % node.data) - - def visit_Tuple(self, node, frame): - self.write('(') - idx = -1 - for idx, item in enumerate(node.items): - if idx: - self.write(', ') - self.visit(item, frame) - self.write(idx == 0 and ',)' or ')') - - def visit_List(self, node, frame): - self.write('[') - for idx, item in enumerate(node.items): - if idx: - self.write(', ') - self.visit(item, frame) - self.write(']') - - def visit_Dict(self, node, frame): - self.write('{') - for idx, item in enumerate(node.items): - if idx: - self.write(', ') - self.visit(item.key, frame) - self.write(': ') - self.visit(item.value, frame) - self.write('}') - - def binop(operator, interceptable=True): - @optimizeconst - def visitor(self, node, frame): - if self.environment.sandboxed and \ - operator in self.environment.intercepted_binops: - self.write('environment.call_binop(context, %r, ' % operator) - self.visit(node.left, frame) - self.write(', ') - self.visit(node.right, frame) - else: - self.write('(') - self.visit(node.left, frame) - self.write(' %s ' % operator) - self.visit(node.right, frame) - self.write(')') - return visitor - - def uaop(operator, interceptable=True): - @optimizeconst - def visitor(self, node, frame): - if self.environment.sandboxed and \ - operator in self.environment.intercepted_unops: - self.write('environment.call_unop(context, %r, ' % operator) - self.visit(node.node, frame) - else: - self.write('(' + operator) - 
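visit_NSRef above backs the namespace() attribute assignments added in 2.10; it emits a runtime isinstance check so only real Namespace objects can be written to. For example, with illustrative data::

    from jinja2 import Template

    t = Template(
        '{% set ns = namespace(total=0) %}'
        '{% for x in seq %}{% set ns.total = ns.total + x %}{% endfor %}'
        '{{ ns.total }}'
    )
    print(t.render(seq=[1, 2, 3]))  # -> "6"
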
self.visit(node.node, frame) - self.write(')') - return visitor - - visit_Add = binop('+') - visit_Sub = binop('-') - visit_Mul = binop('*') - visit_Div = binop('/') - visit_FloorDiv = binop('//') - visit_Pow = binop('**') - visit_Mod = binop('%') - visit_And = binop('and', interceptable=False) - visit_Or = binop('or', interceptable=False) - visit_Pos = uaop('+') - visit_Neg = uaop('-') - visit_Not = uaop('not ', interceptable=False) - del binop, uaop - - @optimizeconst - def visit_Concat(self, node, frame): - if frame.eval_ctx.volatile: - func_name = '(context.eval_ctx.volatile and' \ - ' markup_join or unicode_join)' - elif frame.eval_ctx.autoescape: - func_name = 'markup_join' - else: - func_name = 'unicode_join' - self.write('%s((' % func_name) - for arg in node.nodes: - self.visit(arg, frame) - self.write(', ') - self.write('))') - - @optimizeconst - def visit_Compare(self, node, frame): - self.visit(node.expr, frame) - for op in node.ops: - self.visit(op, frame) - - def visit_Operand(self, node, frame): - self.write(' %s ' % operators[node.op]) - self.visit(node.expr, frame) - - @optimizeconst - def visit_Getattr(self, node, frame): - self.write('environment.getattr(') - self.visit(node.node, frame) - self.write(', %r)' % node.attr) - - @optimizeconst - def visit_Getitem(self, node, frame): - # slices bypass the environment getitem method. - if isinstance(node.arg, nodes.Slice): - self.visit(node.node, frame) - self.write('[') - self.visit(node.arg, frame) - self.write(']') - else: - self.write('environment.getitem(') - self.visit(node.node, frame) - self.write(', ') - self.visit(node.arg, frame) - self.write(')') - - def visit_Slice(self, node, frame): - if node.start is not None: - self.visit(node.start, frame) - self.write(':') - if node.stop is not None: - self.visit(node.stop, frame) - if node.step is not None: - self.write(':') - self.visit(node.step, frame) - - @optimizeconst - def visit_Filter(self, node, frame): - if self.environment.is_async: - self.write('await auto_await(') - self.write(self.filters[node.name] + '(') - func = self.environment.filters.get(node.name) - if func is None: - self.fail('no filter named %r' % node.name, node.lineno) - if getattr(func, 'contextfilter', False): - self.write('context, ') - elif getattr(func, 'evalcontextfilter', False): - self.write('context.eval_ctx, ') - elif getattr(func, 'environmentfilter', False): - self.write('environment, ') - - # if the filter node is None we are inside a filter block - # and want to write to the current buffer - if node.node is not None: - self.visit(node.node, frame) - elif frame.eval_ctx.volatile: - self.write('(context.eval_ctx.autoescape and' - ' Markup(concat(%s)) or concat(%s))' % - (frame.buffer, frame.buffer)) - elif frame.eval_ctx.autoescape: - self.write('Markup(concat(%s))' % frame.buffer) - else: - self.write('concat(%s)' % frame.buffer) - self.signature(node, frame) - self.write(')') - if self.environment.is_async: - self.write(')') - - @optimizeconst - def visit_Test(self, node, frame): - self.write(self.tests[node.name] + '(') - if node.name not in self.environment.tests: - self.fail('no test named %r' % node.name, node.lineno) - self.visit(node.node, frame) - self.signature(node, frame) - self.write(')') - - @optimizeconst - def visit_CondExpr(self, node, frame): - def write_expr2(): - if node.expr2 is not None: - return self.visit(node.expr2, frame) - self.write('undefined(%r)' % ('the inline if-' - 'expression on %s evaluated to false and ' - 'no else section was defined.' 
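The sandboxed branch of binop()/uaop() above routes an operator through environment.call_binop/call_unop only when it is listed in intercepted_binops/intercepted_unops. A sketch of how a sandbox subclass can use that hook; the class name and the specific restriction are illustrative::

    from jinja2.sandbox import SandboxedEnvironment

    class PowerlessEnvironment(SandboxedEnvironment):
        # Only operators listed here are compiled through call_binop().
        intercepted_binops = frozenset(['**'])

        def call_binop(self, context, operator, left, right):
            if operator == '**':
                raise ValueError('the power operator is disabled')
            return SandboxedEnvironment.call_binop(
                self, context, operator, left, right)
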
% self.position(node))) - - self.write('(') - self.visit(node.expr1, frame) - self.write(' if ') - self.visit(node.test, frame) - self.write(' else ') - write_expr2() - self.write(')') - - @optimizeconst - def visit_Call(self, node, frame, forward_caller=False): - if self.environment.is_async: - self.write('await auto_await(') - if self.environment.sandboxed: - self.write('environment.call(context, ') - else: - self.write('context.call(') - self.visit(node.node, frame) - extra_kwargs = forward_caller and {'caller': 'caller'} or None - self.signature(node, frame, extra_kwargs) - self.write(')') - if self.environment.is_async: - self.write(')') - - def visit_Keyword(self, node, frame): - self.write(node.key + '=') - self.visit(node.value, frame) - - # -- Unused nodes for extensions - - def visit_MarkSafe(self, node, frame): - self.write('Markup(') - self.visit(node.expr, frame) - self.write(')') - - def visit_MarkSafeIfAutoescape(self, node, frame): - self.write('(context.eval_ctx.autoescape and Markup or identity)(') - self.visit(node.expr, frame) - self.write(')') - - def visit_EnvironmentAttribute(self, node, frame): - self.write('environment.' + node.name) - - def visit_ExtensionAttribute(self, node, frame): - self.write('environment.extensions[%r].%s' % (node.identifier, node.name)) - - def visit_ImportedName(self, node, frame): - self.write(self.import_aliases[node.importname]) - - def visit_InternalName(self, node, frame): - self.write(node.name) - - def visit_ContextReference(self, node, frame): - self.write('context') - - def visit_Continue(self, node, frame): - self.writeline('continue', node) - - def visit_Break(self, node, frame): - self.writeline('break', node) - - def visit_Scope(self, node, frame): - scope_frame = frame.inner() - scope_frame.symbols.analyze_node(node) - self.enter_frame(scope_frame) - self.blockvisit(node.body, scope_frame) - self.leave_frame(scope_frame) - - def visit_OverlayScope(self, node, frame): - ctx = self.temporary_identifier() - self.writeline('%s = %s' % (ctx, self.derive_context(frame))) - self.writeline('%s.vars = ' % ctx) - self.visit(node.context, frame) - self.push_context_reference(ctx) - - scope_frame = frame.inner(isolated=True) - scope_frame.symbols.analyze_node(node) - self.enter_frame(scope_frame) - self.blockvisit(node.body, scope_frame) - self.leave_frame(scope_frame) - self.pop_context_reference() - - def visit_EvalContextModifier(self, node, frame): - for keyword in node.options: - self.writeline('context.eval_ctx.%s = ' % keyword.key) - self.visit(keyword.value, frame) - try: - val = keyword.value.as_const(frame.eval_ctx) - except nodes.Impossible: - frame.eval_ctx.volatile = True - else: - setattr(frame.eval_ctx, keyword.key, val) - - def visit_ScopedEvalContextModifier(self, node, frame): - old_ctx_name = self.temporary_identifier() - saved_ctx = frame.eval_ctx.save() - self.writeline('%s = context.eval_ctx.save()' % old_ctx_name) - self.visit_EvalContextModifier(node, frame) - for child in node.body: - self.visit(child, frame) - frame.eval_ctx.revert(saved_ctx) - self.writeline('context.eval_ctx.revert(%s)' % old_ctx_name) diff --git a/python/jinja2/constants.py b/python/jinja2/constants.py deleted file mode 100644 index 11efd1e..0000000 --- a/python/jinja2/constants.py +++ /dev/null @@ -1,32 +0,0 @@ -# -*- coding: utf-8 -*- -""" - jinja.constants - ~~~~~~~~~~~~~~~ - - Various constants. - - :copyright: (c) 2017 by the Jinja Team. - :license: BSD, see LICENSE for more details. 
-""" - - -#: list of lorem ipsum words used by the lipsum() helper function -LOREM_IPSUM_WORDS = u'''\ -a ac accumsan ad adipiscing aenean aliquam aliquet amet ante aptent arcu at -auctor augue bibendum blandit class commodo condimentum congue consectetuer -consequat conubia convallis cras cubilia cum curabitur curae cursus dapibus -diam dictum dictumst dignissim dis dolor donec dui duis egestas eget eleifend -elementum elit enim erat eros est et etiam eu euismod facilisi facilisis fames -faucibus felis fermentum feugiat fringilla fusce gravida habitant habitasse hac -hendrerit hymenaeos iaculis id imperdiet in inceptos integer interdum ipsum -justo lacinia lacus laoreet lectus leo libero ligula litora lobortis lorem -luctus maecenas magna magnis malesuada massa mattis mauris metus mi molestie -mollis montes morbi mus nam nascetur natoque nec neque netus nibh nisi nisl non -nonummy nostra nulla nullam nunc odio orci ornare parturient pede pellentesque -penatibus per pharetra phasellus placerat platea porta porttitor posuere -potenti praesent pretium primis proin pulvinar purus quam quis quisque rhoncus -ridiculus risus rutrum sagittis sapien scelerisque sed sem semper senectus sit -sociis sociosqu sodales sollicitudin suscipit suspendisse taciti tellus tempor -tempus tincidunt torquent tortor tristique turpis ullamcorper ultrices -ultricies urna ut varius vehicula vel velit venenatis vestibulum vitae vivamus -viverra volutpat vulputate''' diff --git a/python/jinja2/debug.py b/python/jinja2/debug.py deleted file mode 100644 index b61139f..0000000 --- a/python/jinja2/debug.py +++ /dev/null @@ -1,372 +0,0 @@ -# -*- coding: utf-8 -*- -""" - jinja2.debug - ~~~~~~~~~~~~ - - Implements the debug interface for Jinja. This module does some pretty - ugly stuff with the Python traceback system in order to achieve tracebacks - with correct line numbers, locals and contents. - - :copyright: (c) 2017 by the Jinja Team. - :license: BSD, see LICENSE for more details. -""" -import sys -import traceback -from types import TracebackType, CodeType -from jinja2.utils import missing, internal_code -from jinja2.exceptions import TemplateSyntaxError -from jinja2._compat import iteritems, reraise, PY2 - -# on pypy we can take advantage of transparent proxies -try: - from __pypy__ import tproxy -except ImportError: - tproxy = None - - -# how does the raise helper look like? -try: - exec("raise TypeError, 'foo'") -except SyntaxError: - raise_helper = 'raise __jinja_exception__[1]' -except TypeError: - raise_helper = 'raise __jinja_exception__[0], __jinja_exception__[1]' - - -class TracebackFrameProxy(object): - """Proxies a traceback frame.""" - - def __init__(self, tb): - self.tb = tb - self._tb_next = None - - @property - def tb_next(self): - return self._tb_next - - def set_next(self, next): - if tb_set_next is not None: - try: - tb_set_next(self.tb, next and next.tb or None) - except Exception: - # this function can fail due to all the hackery it does - # on various python implementations. We just catch errors - # down and ignore them if necessary. 
- pass - self._tb_next = next - - @property - def is_jinja_frame(self): - return '__jinja_template__' in self.tb.tb_frame.f_globals - - def __getattr__(self, name): - return getattr(self.tb, name) - - -def make_frame_proxy(frame): - proxy = TracebackFrameProxy(frame) - if tproxy is None: - return proxy - def operation_handler(operation, *args, **kwargs): - if operation in ('__getattribute__', '__getattr__'): - return getattr(proxy, args[0]) - elif operation == '__setattr__': - proxy.__setattr__(*args, **kwargs) - else: - return getattr(proxy, operation)(*args, **kwargs) - return tproxy(TracebackType, operation_handler) - - -class ProcessedTraceback(object): - """Holds a Jinja preprocessed traceback for printing or reraising.""" - - def __init__(self, exc_type, exc_value, frames): - assert frames, 'no frames for this traceback?' - self.exc_type = exc_type - self.exc_value = exc_value - self.frames = frames - - # newly concatenate the frames (which are proxies) - prev_tb = None - for tb in self.frames: - if prev_tb is not None: - prev_tb.set_next(tb) - prev_tb = tb - prev_tb.set_next(None) - - def render_as_text(self, limit=None): - """Return a string with the traceback.""" - lines = traceback.format_exception(self.exc_type, self.exc_value, - self.frames[0], limit=limit) - return ''.join(lines).rstrip() - - def render_as_html(self, full=False): - """Return a unicode string with the traceback as rendered HTML.""" - from jinja2.debugrenderer import render_traceback - return u'%s\n\n' % ( - render_traceback(self, full=full), - self.render_as_text().decode('utf-8', 'replace') - ) - - @property - def is_template_syntax_error(self): - """`True` if this is a template syntax error.""" - return isinstance(self.exc_value, TemplateSyntaxError) - - @property - def exc_info(self): - """Exception info tuple with a proxy around the frame objects.""" - return self.exc_type, self.exc_value, self.frames[0] - - @property - def standard_exc_info(self): - """Standard python exc_info for re-raising""" - tb = self.frames[0] - # the frame will be an actual traceback (or transparent proxy) if - # we are on pypy or a python implementation with support for tproxy - if type(tb) is not TracebackType: - tb = tb.tb - return self.exc_type, self.exc_value, tb - - -def make_traceback(exc_info, source_hint=None): - """Creates a processed traceback object from the exc_info.""" - exc_type, exc_value, tb = exc_info - if isinstance(exc_value, TemplateSyntaxError): - exc_info = translate_syntax_error(exc_value, source_hint) - initial_skip = 0 - else: - initial_skip = 1 - return translate_exception(exc_info, initial_skip) - - -def translate_syntax_error(error, source=None): - """Rewrites a syntax error to please traceback systems.""" - error.source = source - error.translated = True - exc_info = (error.__class__, error, None) - filename = error.filename - if filename is None: - filename = '' - return fake_exc_info(exc_info, filename, error.lineno) - - -def translate_exception(exc_info, initial_skip=0): - """If passed an exc_info it will automatically rewrite the exceptions - all the way down to the correct line numbers and frames. - """ - tb = exc_info[2] - frames = [] - - # skip some internal frames if wanted - for x in range(initial_skip): - if tb is not None: - tb = tb.tb_next - initial_tb = tb - - while tb is not None: - # skip frames decorated with @internalcode. These are internal - # calls we can't avoid and that are useless in template debugging - # output. 
- if tb.tb_frame.f_code in internal_code: - tb = tb.tb_next - continue - - # save a reference to the next frame if we override the current - # one with a faked one. - next = tb.tb_next - - # fake template exceptions - template = tb.tb_frame.f_globals.get('__jinja_template__') - if template is not None: - lineno = template.get_corresponding_lineno(tb.tb_lineno) - tb = fake_exc_info(exc_info[:2] + (tb,), template.filename, - lineno)[2] - - frames.append(make_frame_proxy(tb)) - tb = next - - # if we don't have any exceptions in the frames left, we have to - # reraise it unchanged. - # XXX: can we backup here? when could this happen? - if not frames: - reraise(exc_info[0], exc_info[1], exc_info[2]) - - return ProcessedTraceback(exc_info[0], exc_info[1], frames) - - -def get_jinja_locals(real_locals): - ctx = real_locals.get('context') - if ctx: - locals = ctx.get_all().copy() - else: - locals = {} - - local_overrides = {} - - for name, value in iteritems(real_locals): - if not name.startswith('l_') or value is missing: - continue - try: - _, depth, name = name.split('_', 2) - depth = int(depth) - except ValueError: - continue - cur_depth = local_overrides.get(name, (-1,))[0] - if cur_depth < depth: - local_overrides[name] = (depth, value) - - for name, (_, value) in iteritems(local_overrides): - if value is missing: - locals.pop(name, None) - else: - locals[name] = value - - return locals - - -def fake_exc_info(exc_info, filename, lineno): - """Helper for `translate_exception`.""" - exc_type, exc_value, tb = exc_info - - # figure the real context out - if tb is not None: - locals = get_jinja_locals(tb.tb_frame.f_locals) - - # if there is a local called __jinja_exception__, we get - # rid of it to not break the debug functionality. - locals.pop('__jinja_exception__', None) - else: - locals = {} - - # assamble fake globals we need - globals = { - '__name__': filename, - '__file__': filename, - '__jinja_exception__': exc_info[:2], - - # we don't want to keep the reference to the template around - # to not cause circular dependencies, but we mark it as Jinja - # frame for the ProcessedTraceback - '__jinja_template__': None - } - - # and fake the exception - code = compile('\n' * (lineno - 1) + raise_helper, filename, 'exec') - - # if it's possible, change the name of the code. This won't work - # on some python environments such as google appengine - try: - if tb is None: - location = 'template' - else: - function = tb.tb_frame.f_code.co_name - if function == 'root': - location = 'top-level template code' - elif function.startswith('block_'): - location = 'block "%s"' % function[6:] - else: - location = 'template' - - if PY2: - code = CodeType(0, code.co_nlocals, code.co_stacksize, - code.co_flags, code.co_code, code.co_consts, - code.co_names, code.co_varnames, filename, - location, code.co_firstlineno, - code.co_lnotab, (), ()) - else: - code = CodeType(0, code.co_kwonlyargcount, - code.co_nlocals, code.co_stacksize, - code.co_flags, code.co_code, code.co_consts, - code.co_names, code.co_varnames, filename, - location, code.co_firstlineno, - code.co_lnotab, (), ()) - except Exception as e: - pass - - # execute the code and catch the new traceback - try: - exec(code, globals, locals) - except: - exc_info = sys.exc_info() - new_tb = exc_info[2].tb_next - - # return without this frame - return exc_info[:2] + (new_tb,) - - -def _init_ugly_crap(): - """This function implements a few ugly things so that we can patch the - traceback objects. 
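translate_exception() and fake_exc_info() above are what make a failing render report template filenames and line numbers instead of the generated module's. A rough sketch of the effect; the template name and contents are illustrative::

    from jinja2 import Environment, DictLoader
    import traceback

    env = Environment(loader=DictLoader({'page.html': 'value: {{ 1 / denominator }}'}))
    try:
        env.get_template('page.html').render(denominator=0)
    except ZeroDivisionError:
        # The rewritten frame reads: File "page.html", line 1, in top-level template code
        traceback.print_exc()
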
The function returned allows resetting `tb_next` on - any python traceback object. Do not attempt to use this on non cpython - interpreters - """ - import ctypes - from types import TracebackType - - if PY2: - # figure out size of _Py_ssize_t for Python 2: - if hasattr(ctypes.pythonapi, 'Py_InitModule4_64'): - _Py_ssize_t = ctypes.c_int64 - else: - _Py_ssize_t = ctypes.c_int - else: - # platform ssize_t on Python 3 - _Py_ssize_t = ctypes.c_ssize_t - - # regular python - class _PyObject(ctypes.Structure): - pass - _PyObject._fields_ = [ - ('ob_refcnt', _Py_ssize_t), - ('ob_type', ctypes.POINTER(_PyObject)) - ] - - # python with trace - if hasattr(sys, 'getobjects'): - class _PyObject(ctypes.Structure): - pass - _PyObject._fields_ = [ - ('_ob_next', ctypes.POINTER(_PyObject)), - ('_ob_prev', ctypes.POINTER(_PyObject)), - ('ob_refcnt', _Py_ssize_t), - ('ob_type', ctypes.POINTER(_PyObject)) - ] - - class _Traceback(_PyObject): - pass - _Traceback._fields_ = [ - ('tb_next', ctypes.POINTER(_Traceback)), - ('tb_frame', ctypes.POINTER(_PyObject)), - ('tb_lasti', ctypes.c_int), - ('tb_lineno', ctypes.c_int) - ] - - def tb_set_next(tb, next): - """Set the tb_next attribute of a traceback object.""" - if not (isinstance(tb, TracebackType) and - (next is None or isinstance(next, TracebackType))): - raise TypeError('tb_set_next arguments must be traceback objects') - obj = _Traceback.from_address(id(tb)) - if tb.tb_next is not None: - old = _Traceback.from_address(id(tb.tb_next)) - old.ob_refcnt -= 1 - if next is None: - obj.tb_next = ctypes.POINTER(_Traceback)() - else: - next = _Traceback.from_address(id(next)) - next.ob_refcnt += 1 - obj.tb_next = ctypes.pointer(next) - - return tb_set_next - - -# try to get a tb_set_next implementation if we don't have transparent -# proxies. -tb_set_next = None -if tproxy is None: - try: - tb_set_next = _init_ugly_crap() - except: - pass - del _init_ugly_crap diff --git a/python/jinja2/defaults.py b/python/jinja2/defaults.py deleted file mode 100644 index 7c93dec..0000000 --- a/python/jinja2/defaults.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -""" - jinja2.defaults - ~~~~~~~~~~~~~~~ - - Jinja default filters and tags. - - :copyright: (c) 2017 by the Jinja Team. - :license: BSD, see LICENSE for more details. 
-""" -from jinja2._compat import range_type -from jinja2.utils import generate_lorem_ipsum, Cycler, Joiner, Namespace - - -# defaults for the parser / lexer -BLOCK_START_STRING = '{%' -BLOCK_END_STRING = '%}' -VARIABLE_START_STRING = '{{' -VARIABLE_END_STRING = '}}' -COMMENT_START_STRING = '{#' -COMMENT_END_STRING = '#}' -LINE_STATEMENT_PREFIX = None -LINE_COMMENT_PREFIX = None -TRIM_BLOCKS = False -LSTRIP_BLOCKS = False -NEWLINE_SEQUENCE = '\n' -KEEP_TRAILING_NEWLINE = False - - -# default filters, tests and namespace -from jinja2.filters import FILTERS as DEFAULT_FILTERS -from jinja2.tests import TESTS as DEFAULT_TESTS -DEFAULT_NAMESPACE = { - 'range': range_type, - 'dict': dict, - 'lipsum': generate_lorem_ipsum, - 'cycler': Cycler, - 'joiner': Joiner, - 'namespace': Namespace -} - - -# default policies -DEFAULT_POLICIES = { - 'compiler.ascii_str': True, - 'urlize.rel': 'noopener', - 'urlize.target': None, - 'truncate.leeway': 5, - 'json.dumps_function': None, - 'json.dumps_kwargs': {'sort_keys': True}, - 'ext.i18n.trimmed': False, -} - - -# export all constants -__all__ = tuple(x for x in locals().keys() if x.isupper()) diff --git a/python/jinja2/environment.py b/python/jinja2/environment.py deleted file mode 100644 index 549d9af..0000000 --- a/python/jinja2/environment.py +++ /dev/null @@ -1,1276 +0,0 @@ -# -*- coding: utf-8 -*- -""" - jinja2.environment - ~~~~~~~~~~~~~~~~~~ - - Provides a class that holds runtime and parsing time options. - - :copyright: (c) 2017 by the Jinja Team. - :license: BSD, see LICENSE for more details. -""" -import os -import sys -import weakref -from functools import reduce, partial -from jinja2 import nodes -from jinja2.defaults import BLOCK_START_STRING, \ - BLOCK_END_STRING, VARIABLE_START_STRING, VARIABLE_END_STRING, \ - COMMENT_START_STRING, COMMENT_END_STRING, LINE_STATEMENT_PREFIX, \ - LINE_COMMENT_PREFIX, TRIM_BLOCKS, NEWLINE_SEQUENCE, \ - DEFAULT_FILTERS, DEFAULT_TESTS, DEFAULT_NAMESPACE, \ - DEFAULT_POLICIES, KEEP_TRAILING_NEWLINE, LSTRIP_BLOCKS -from jinja2.lexer import get_lexer, TokenStream -from jinja2.parser import Parser -from jinja2.nodes import EvalContext -from jinja2.compiler import generate, CodeGenerator -from jinja2.runtime import Undefined, new_context, Context -from jinja2.exceptions import TemplateSyntaxError, TemplateNotFound, \ - TemplatesNotFound, TemplateRuntimeError -from jinja2.utils import import_string, LRUCache, Markup, missing, \ - concat, consume, internalcode, have_async_gen -from jinja2._compat import imap, ifilter, string_types, iteritems, \ - text_type, reraise, implements_iterator, implements_to_string, \ - encode_filename, PY2, PYPY - - -# for direct template usage we have up to ten living environments -_spontaneous_environments = LRUCache(10) - -# the function to create jinja traceback objects. This is dynamically -# imported on the first exception in the exception handler. -_make_traceback = None - - -def get_spontaneous_environment(*args): - """Return a new spontaneous environment. A spontaneous environment is an - unnamed and unaccessible (in theory) environment that is used for - templates generated from a string and not from the file system. 
- """ - try: - env = _spontaneous_environments.get(args) - except TypeError: - return Environment(*args) - if env is not None: - return env - _spontaneous_environments[args] = env = Environment(*args) - env.shared = True - return env - - -def create_cache(size): - """Return the cache class for the given size.""" - if size == 0: - return None - if size < 0: - return {} - return LRUCache(size) - - -def copy_cache(cache): - """Create an empty copy of the given cache.""" - if cache is None: - return None - elif type(cache) is dict: - return {} - return LRUCache(cache.capacity) - - -def load_extensions(environment, extensions): - """Load the extensions from the list and bind it to the environment. - Returns a dict of instantiated environments. - """ - result = {} - for extension in extensions: - if isinstance(extension, string_types): - extension = import_string(extension) - result[extension.identifier] = extension(environment) - return result - - -def fail_for_missing_callable(string, name): - msg = string % name - if isinstance(name, Undefined): - try: - name._fail_with_undefined_error() - except Exception as e: - msg = '%s (%s; did you forget to quote the callable name?)' % (msg, e) - raise TemplateRuntimeError(msg) - - -def _environment_sanity_check(environment): - """Perform a sanity check on the environment.""" - assert issubclass(environment.undefined, Undefined), 'undefined must ' \ - 'be a subclass of undefined because filters depend on it.' - assert environment.block_start_string != \ - environment.variable_start_string != \ - environment.comment_start_string, 'block, variable and comment ' \ - 'start strings must be different' - assert environment.newline_sequence in ('\r', '\r\n', '\n'), \ - 'newline_sequence set to unknown line ending string.' - return environment - - -class Environment(object): - r"""The core component of Jinja is the `Environment`. It contains - important shared variables like configuration, filters, tests, - globals and others. Instances of this class may be modified if - they are not shared and if no template was loaded so far. - Modifications on environments after the first template was loaded - will lead to surprising effects and undefined behavior. - - Here are the possible initialization parameters: - - `block_start_string` - The string marking the beginning of a block. Defaults to ``'{%'``. - - `block_end_string` - The string marking the end of a block. Defaults to ``'%}'``. - - `variable_start_string` - The string marking the beginning of a print statement. - Defaults to ``'{{'``. - - `variable_end_string` - The string marking the end of a print statement. Defaults to - ``'}}'``. - - `comment_start_string` - The string marking the beginning of a comment. Defaults to ``'{#'``. - - `comment_end_string` - The string marking the end of a comment. Defaults to ``'#}'``. - - `line_statement_prefix` - If given and a string, this will be used as prefix for line based - statements. See also :ref:`line-statements`. - - `line_comment_prefix` - If given and a string, this will be used as prefix for line based - comments. See also :ref:`line-statements`. - - .. versionadded:: 2.2 - - `trim_blocks` - If this is set to ``True`` the first newline after a block is - removed (block, not variable tag!). Defaults to `False`. - - `lstrip_blocks` - If this is set to ``True`` leading spaces and tabs are stripped - from the start of a line to a block. Defaults to `False`. - - `newline_sequence` - The sequence that starts a newline. 
Must be one of ``'\r'``, - ``'\n'`` or ``'\r\n'``. The default is ``'\n'`` which is a - useful default for Linux and OS X systems as well as web - applications. - - `keep_trailing_newline` - Preserve the trailing newline when rendering templates. - The default is ``False``, which causes a single newline, - if present, to be stripped from the end of the template. - - .. versionadded:: 2.7 - - `extensions` - List of Jinja extensions to use. This can either be import paths - as strings or extension classes. For more information have a - look at :ref:`the extensions documentation `. - - `optimized` - should the optimizer be enabled? Default is ``True``. - - `undefined` - :class:`Undefined` or a subclass of it that is used to represent - undefined values in the template. - - `finalize` - A callable that can be used to process the result of a variable - expression before it is output. For example one can convert - ``None`` implicitly into an empty string here. - - `autoescape` - If set to ``True`` the XML/HTML autoescaping feature is enabled by - default. For more details about autoescaping see - :class:`~jinja2.utils.Markup`. As of Jinja 2.4 this can also - be a callable that is passed the template name and has to - return ``True`` or ``False`` depending on autoescape should be - enabled by default. - - .. versionchanged:: 2.4 - `autoescape` can now be a function - - `loader` - The template loader for this environment. - - `cache_size` - The size of the cache. Per default this is ``400`` which means - that if more than 400 templates are loaded the loader will clean - out the least recently used template. If the cache size is set to - ``0`` templates are recompiled all the time, if the cache size is - ``-1`` the cache will not be cleaned. - - .. versionchanged:: 2.8 - The cache size was increased to 400 from a low 50. - - `auto_reload` - Some loaders load templates from locations where the template - sources may change (ie: file system or database). If - ``auto_reload`` is set to ``True`` (default) every time a template is - requested the loader checks if the source changed and if yes, it - will reload the template. For higher performance it's possible to - disable that. - - `bytecode_cache` - If set to a bytecode cache object, this object will provide a - cache for the internal Jinja bytecode so that templates don't - have to be parsed if they were not changed. - - See :ref:`bytecode-cache` for more information. - - `enable_async` - If set to true this enables async template execution which allows - you to take advantage of newer Python features. This requires - Python 3.6 or later. - """ - - #: if this environment is sandboxed. Modifying this variable won't make - #: the environment sandboxed though. For a real sandboxed environment - #: have a look at jinja2.sandbox. This flag alone controls the code - #: generation by the compiler. - sandboxed = False - - #: True if the environment is just an overlay - overlayed = False - - #: the environment this environment is linked to if it is an overlay - linked_to = None - - #: shared environments have this set to `True`. A shared environment - #: must not be modified - shared = False - - #: these are currently EXPERIMENTAL undocumented features. - exception_handler = None - exception_formatter = None - - #: the class that is used for code generation. See - #: :class:`~jinja2.compiler.CodeGenerator` for more information. - code_generator_class = CodeGenerator - - #: the context class thatis used for templates. 
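Taken together, the constructor parameters documented above are normally supplied as keywords. A typical configuration; the 'templates' directory and extension list are illustrative::

    from jinja2 import Environment, FileSystemLoader, select_autoescape

    env = Environment(
        loader=FileSystemLoader('templates'),
        autoescape=select_autoescape(['html', 'xml']),
        trim_blocks=True,
        lstrip_blocks=True,
        cache_size=400,
    )
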
See - #: :class:`~jinja2.runtime.Context` for more information. - context_class = Context - - def __init__(self, - block_start_string=BLOCK_START_STRING, - block_end_string=BLOCK_END_STRING, - variable_start_string=VARIABLE_START_STRING, - variable_end_string=VARIABLE_END_STRING, - comment_start_string=COMMENT_START_STRING, - comment_end_string=COMMENT_END_STRING, - line_statement_prefix=LINE_STATEMENT_PREFIX, - line_comment_prefix=LINE_COMMENT_PREFIX, - trim_blocks=TRIM_BLOCKS, - lstrip_blocks=LSTRIP_BLOCKS, - newline_sequence=NEWLINE_SEQUENCE, - keep_trailing_newline=KEEP_TRAILING_NEWLINE, - extensions=(), - optimized=True, - undefined=Undefined, - finalize=None, - autoescape=False, - loader=None, - cache_size=400, - auto_reload=True, - bytecode_cache=None, - enable_async=False): - # !!Important notice!! - # The constructor accepts quite a few arguments that should be - # passed by keyword rather than position. However it's important to - # not change the order of arguments because it's used at least - # internally in those cases: - # - spontaneous environments (i18n extension and Template) - # - unittests - # If parameter changes are required only add parameters at the end - # and don't change the arguments (or the defaults!) of the arguments - # existing already. - - # lexer / parser information - self.block_start_string = block_start_string - self.block_end_string = block_end_string - self.variable_start_string = variable_start_string - self.variable_end_string = variable_end_string - self.comment_start_string = comment_start_string - self.comment_end_string = comment_end_string - self.line_statement_prefix = line_statement_prefix - self.line_comment_prefix = line_comment_prefix - self.trim_blocks = trim_blocks - self.lstrip_blocks = lstrip_blocks - self.newline_sequence = newline_sequence - self.keep_trailing_newline = keep_trailing_newline - - # runtime information - self.undefined = undefined - self.optimized = optimized - self.finalize = finalize - self.autoescape = autoescape - - # defaults - self.filters = DEFAULT_FILTERS.copy() - self.tests = DEFAULT_TESTS.copy() - self.globals = DEFAULT_NAMESPACE.copy() - - # set the loader provided - self.loader = loader - self.cache = create_cache(cache_size) - self.bytecode_cache = bytecode_cache - self.auto_reload = auto_reload - - # configurable policies - self.policies = DEFAULT_POLICIES.copy() - - # load extensions - self.extensions = load_extensions(self, extensions) - - self.enable_async = enable_async - self.is_async = self.enable_async and have_async_gen - - _environment_sanity_check(self) - - def add_extension(self, extension): - """Adds an extension after the environment was created. - - .. versionadded:: 2.5 - """ - self.extensions.update(load_extensions(self, [extension])) - - def extend(self, **attributes): - """Add the items to the instance of the environment if they do not exist - yet. This is used by :ref:`extensions ` to register - callbacks and configuration values without breaking inheritance. 
- """ - for key, value in iteritems(attributes): - if not hasattr(self, key): - setattr(self, key, value) - - def overlay(self, block_start_string=missing, block_end_string=missing, - variable_start_string=missing, variable_end_string=missing, - comment_start_string=missing, comment_end_string=missing, - line_statement_prefix=missing, line_comment_prefix=missing, - trim_blocks=missing, lstrip_blocks=missing, - extensions=missing, optimized=missing, - undefined=missing, finalize=missing, autoescape=missing, - loader=missing, cache_size=missing, auto_reload=missing, - bytecode_cache=missing): - """Create a new overlay environment that shares all the data with the - current environment except for cache and the overridden attributes. - Extensions cannot be removed for an overlayed environment. An overlayed - environment automatically gets all the extensions of the environment it - is linked to plus optional extra extensions. - - Creating overlays should happen after the initial environment was set - up completely. Not all attributes are truly linked, some are just - copied over so modifications on the original environment may not shine - through. - """ - args = dict(locals()) - del args['self'], args['cache_size'], args['extensions'] - - rv = object.__new__(self.__class__) - rv.__dict__.update(self.__dict__) - rv.overlayed = True - rv.linked_to = self - - for key, value in iteritems(args): - if value is not missing: - setattr(rv, key, value) - - if cache_size is not missing: - rv.cache = create_cache(cache_size) - else: - rv.cache = copy_cache(self.cache) - - rv.extensions = {} - for key, value in iteritems(self.extensions): - rv.extensions[key] = value.bind(rv) - if extensions is not missing: - rv.extensions.update(load_extensions(rv, extensions)) - - return _environment_sanity_check(rv) - - lexer = property(get_lexer, doc="The lexer for this environment.") - - def iter_extensions(self): - """Iterates over the extensions by priority.""" - return iter(sorted(self.extensions.values(), - key=lambda x: x.priority)) - - def getitem(self, obj, argument): - """Get an item or attribute of an object but prefer the item.""" - try: - return obj[argument] - except (AttributeError, TypeError, LookupError): - if isinstance(argument, string_types): - try: - attr = str(argument) - except Exception: - pass - else: - try: - return getattr(obj, attr) - except AttributeError: - pass - return self.undefined(obj=obj, name=argument) - - def getattr(self, obj, attribute): - """Get an item or attribute of an object but prefer the attribute. - Unlike :meth:`getitem` the attribute *must* be a bytestring. - """ - try: - return getattr(obj, attribute) - except AttributeError: - pass - try: - return obj[attribute] - except (TypeError, LookupError, AttributeError): - return self.undefined(obj=obj, name=attribute) - - def call_filter(self, name, value, args=None, kwargs=None, - context=None, eval_ctx=None): - """Invokes a filter on a value the same way the compiler does it. - - Note that on Python 3 this might return a coroutine in case the - filter is running from an environment in async mode and the filter - supports async execution. It's your responsibility to await this - if needed. - - .. 
versionadded:: 2.7 - """ - func = self.filters.get(name) - if func is None: - fail_for_missing_callable('no filter named %r', name) - args = [value] + list(args or ()) - if getattr(func, 'contextfilter', False): - if context is None: - raise TemplateRuntimeError('Attempted to invoke context ' - 'filter without context') - args.insert(0, context) - elif getattr(func, 'evalcontextfilter', False): - if eval_ctx is None: - if context is not None: - eval_ctx = context.eval_ctx - else: - eval_ctx = EvalContext(self) - args.insert(0, eval_ctx) - elif getattr(func, 'environmentfilter', False): - args.insert(0, self) - return func(*args, **(kwargs or {})) - - def call_test(self, name, value, args=None, kwargs=None): - """Invokes a test on a value the same way the compiler does it. - - .. versionadded:: 2.7 - """ - func = self.tests.get(name) - if func is None: - fail_for_missing_callable('no test named %r', name) - return func(value, *(args or ()), **(kwargs or {})) - - @internalcode - def parse(self, source, name=None, filename=None): - """Parse the sourcecode and return the abstract syntax tree. This - tree of nodes is used by the compiler to convert the template into - executable source- or bytecode. This is useful for debugging or to - extract information from templates. - - If you are :ref:`developing Jinja2 extensions ` - this gives you a good overview of the node tree generated. - """ - try: - return self._parse(source, name, filename) - except TemplateSyntaxError: - exc_info = sys.exc_info() - self.handle_exception(exc_info, source_hint=source) - - def _parse(self, source, name, filename): - """Internal parsing function used by `parse` and `compile`.""" - return Parser(self, source, name, encode_filename(filename)).parse() - - def lex(self, source, name=None, filename=None): - """Lex the given sourcecode and return a generator that yields - tokens as tuples in the form ``(lineno, token_type, value)``. - This can be useful for :ref:`extension development ` - and debugging templates. - - This does not perform preprocessing. If you want the preprocessing - of the extensions to be applied you have to filter source through - the :meth:`preprocess` method. - """ - source = text_type(source) - try: - return self.lexer.tokeniter(source, name, filename) - except TemplateSyntaxError: - exc_info = sys.exc_info() - self.handle_exception(exc_info, source_hint=source) - - def preprocess(self, source, name=None, filename=None): - """Preprocesses the source with all extensions. This is automatically - called for all parsing and compiling methods but *not* for :meth:`lex` - because there you usually only want the actual source tokenized. - """ - return reduce(lambda s, e: e.preprocess(s, name, filename), - self.iter_extensions(), text_type(source)) - - def _tokenize(self, source, name, filename=None, state=None): - """Called by the parser to do the preprocessing and filtering - for all the extensions. Returns a :class:`~jinja2.lexer.TokenStream`. - """ - source = self.preprocess(source, name, filename) - stream = self.lexer.tokenize(source, name, filename, state) - for ext in self.iter_extensions(): - stream = ext.filter_stream(stream) - if not isinstance(stream, TokenStream): - stream = TokenStream(stream, name, filename) - return stream - - def _generate(self, source, name, filename, defer_init=False): - """Internal hook that can be overridden to hook a different generate - method in. - - .. 
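call_filter() and call_test() above invoke filters and tests the same way compiled templates do, and parse() exposes the AST that jinja2.meta works on. A minimal sketch; the template string is illustrative::

    from jinja2 import Environment, meta

    env = Environment()
    assert env.call_filter('upper', 'jinja') == 'JINJA'
    assert env.call_test('divisibleby', 10, [5]) is True

    ast = env.parse('Hello {{ user.name }}!')
    print(meta.find_undeclared_variables(ast))  # -> {'user'}
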
versionadded:: 2.5 - """ - return generate(source, self, name, filename, defer_init=defer_init, - optimized=self.optimized) - - def _compile(self, source, filename): - """Internal hook that can be overridden to hook a different compile - method in. - - .. versionadded:: 2.5 - """ - return compile(source, filename, 'exec') - - @internalcode - def compile(self, source, name=None, filename=None, raw=False, - defer_init=False): - """Compile a node or template source code. The `name` parameter is - the load name of the template after it was joined using - :meth:`join_path` if necessary, not the filename on the file system. - the `filename` parameter is the estimated filename of the template on - the file system. If the template came from a database or memory this - can be omitted. - - The return value of this method is a python code object. If the `raw` - parameter is `True` the return value will be a string with python - code equivalent to the bytecode returned otherwise. This method is - mainly used internally. - - `defer_init` is use internally to aid the module code generator. This - causes the generated code to be able to import without the global - environment variable to be set. - - .. versionadded:: 2.4 - `defer_init` parameter added. - """ - source_hint = None - try: - if isinstance(source, string_types): - source_hint = source - source = self._parse(source, name, filename) - source = self._generate(source, name, filename, - defer_init=defer_init) - if raw: - return source - if filename is None: - filename = '