UNKNOWN
-
-
Save CeivenLean/5efe88dec95642fb8d633757dc3efe61 to your computer and use it in GitHub Desktop.
<component name="ProjectCodeStyleConfiguration"> | |
<state> | |
<option name="PREFERRED_PROJECT_CODE_STYLE" value="Default" /> | |
</state> | |
</component> |
<component name="ProjectDictionaryState"> | |
<dictionary name="Ceiven" /> | |
</component> |
<?xml version="1.0" encoding="UTF-8"?> | |
<project version="4"> | |
<component name="Encoding" defaultCharsetForPropertiesFiles="UTF-8"> | |
<file url="PROJECT" charset="UTF-8" /> | |
</component> | |
</project> |
<?xml version="1.0" encoding="UTF-8"?> | |
<project version="4"> | |
<component name="JavaScriptSettings"> | |
<option name="languageLevel" value="ES6" /> | |
</component> | |
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.7 (shoppy)" project-jdk-type="Python SDK" /> | |
</project> |
<?xml version="1.0" encoding="UTF-8"?> | |
<project version="4"> | |
<component name="ProjectModuleManager"> | |
<modules> | |
<module fileurl="file://$PROJECT_DIR$/.idea/shoppy.iml" filepath="$PROJECT_DIR$/.idea/shoppy.iml" /> | |
</modules> | |
</component> | |
</project> |
<?xml version="1.0" encoding="UTF-8"?> | |
<module type="PYTHON_MODULE" version="4"> | |
<component name="Flask"> | |
<option name="enabled" value="true" /> | |
</component> | |
<component name="NewModuleRootManager"> | |
<content url="file://$MODULE_DIR$"> | |
<excludeFolder url="file://$MODULE_DIR$/venv" /> | |
</content> | |
<orderEntry type="inheritedJdk" /> | |
<orderEntry type="sourceFolder" forTests="false" /> | |
</component> | |
<component name="TemplatesService"> | |
<option name="TEMPLATE_CONFIGURATION" value="Jinja2" /> | |
<option name="TEMPLATE_FOLDERS"> | |
<list> | |
<option value="$MODULE_DIR$/../shoppy\templates" /> | |
</list> | |
</option> | |
</component> | |
<component name="TestRunnerService"> | |
<option name="PROJECT_TEST_RUNNER" value="Unittests" /> | |
</component> | |
</module> |
<?xml version="1.0" encoding="UTF-8"?> | |
<project version="4"> | |
<component name="SqlDialectMappings"> | |
<file url="PROJECT" dialect="MySQL" /> | |
</component> | |
</project> |
<?xml version="1.0" encoding="UTF-8"?> | |
<project version="4"> | |
<component name="VagrantProjectSettings"> | |
<option name="instanceFolder" value="" /> | |
<option name="provider" value="" /> | |
</component> | |
</project> |
# Minimal Flask application exposing a single route.
from flask import Flask

app = Flask(__name__)


@app.route('/')
def hello_world():
    """Return a plain-text greeting for the site root."""
    return 'Hello World!'


if __name__ == '__main__':
    # NOTE(review): debug=True enables the Werkzeug interactive debugger
    # (arbitrary code execution) and auto-reload -- never ship with this
    # flag enabled in production.
    app.run(debug=True)
pip |
Metadata-Version: 2.0 | |
Name: click | |
Version: 6.7 | |
Summary: A simple wrapper around optparse for powerful command line utilities. | |
Home-page: http://github.com/mitsuhiko/click | |
Author: Armin Ronacher | |
Author-email: armin.ronacher@active-4.com | |
License: UNKNOWN | |
Platform: UNKNOWN | |
Classifier: License :: OSI Approved :: BSD License | |
Classifier: Programming Language :: Python | |
Classifier: Programming Language :: Python :: 3 | |
UNKNOWN | |
{"classifiers": ["License :: OSI Approved :: BSD License", "Programming Language :: Python", "Programming Language :: Python :: 3"], "extensions": {"python.details": {"contacts": [{"email": "armin.ronacher@active-4.com", "name": "Armin Ronacher", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "http://github.com/mitsuhiko/click"}}}, "generator": "bdist_wheel (0.30.0.a0)", "metadata_version": "2.0", "name": "click", "summary": "A simple wrapper around optparse for powerful command line utilities.", "version": "6.7"} |
click/__init__.py,sha256=k8R00cFKWI8dhDVKQeLBlAdNh1CxerMEDRiGnr32gdw,2858 | |
click/_bashcomplete.py,sha256=82rMiibtEurdwBq60NHXVCBuGXJHDpblFO9o2YxJDF0,2423 | |
click/_compat.py,sha256=j59MpzxYGE-fTGj0A5sg8UI8GhHod1XMojiCA0jvbL0,21011 | |
click/_termui_impl.py,sha256=Ol1JJhvBRw3l8j1WIU0tOWjQtxxmwGE44lFDbzDqzoA,16395 | |
click/_textwrap.py,sha256=gwS4m7bdQiJnzaDG8osFcRb-5vn4t4l2qSCy-5csCEc,1198 | |
click/_unicodefun.py,sha256=A3UOzJw6lEZyol2SBg3fNXgweTutaOzkJ61OB7vik3Y,4204 | |
click/_winconsole.py,sha256=MzG46DEYPoRyx4SO7EIhFuFZHESgooAfJLIukbB6p5c,7790 | |
click/core.py,sha256=M0nJ6Kkye7XZXYG7HCbkJWSfy14WHV6bQmGLACrOhKw,70254 | |
click/decorators.py,sha256=y7CX2needh8iRWafj-QS_hGQFsN24eyXAhx5Y2ATwas,10941 | |
click/exceptions.py,sha256=rOa0pP3PbSy0_AAPOW9irBEM8AJ3BySN-4z2VUwFVo4,6788 | |
click/formatting.py,sha256=eh-cypTUAhpI3HD-K4ZpR3vCiURIO62xXvKkR3tNUTM,8889 | |
click/globals.py,sha256=PAgnKvGxq4YuEIldw3lgYOGBLYwsyxnm1IByBX3BFXo,1515 | |
click/parser.py,sha256=i01xgYuIA6AwQWEXjshwHSwnTR3gUep4FxJIfyW4ta4,15510 | |
click/termui.py,sha256=Bp99MSWQtyoWe1_7HggDmA77n--3KLxu7NsZMFMaCUo,21008 | |
click/testing.py,sha256=kJ9mjtJgwNAlkgKcFf9-ISxufmaPDbbuOHVC9WIvKdY,11002 | |
click/types.py,sha256=ZGb2lmFs5Vwd9loTRIMbGcqhPVOql8mGoBhWBRT6V4E,18864 | |
click/utils.py,sha256=1jalPlkUU28JReTEQeeSFtbJd-SirYWBNfjtELBKzT4,14916 | |
click-6.7.dist-info/DESCRIPTION.rst,sha256=OCTuuN6LcWulhHS3d5rfjdsQtW22n7HENFRh6jC6ego,10 | |
click-6.7.dist-info/METADATA,sha256=l6lAyogIUXiHKUK_rWguef-EMcvO5C6bXzFCNCcblbQ,424 | |
click-6.7.dist-info/RECORD,, | |
click-6.7.dist-info/WHEEL,sha256=5wvfB7GvgZAbKBSE9uX9Zbi6LCL-_KgezgHblXhCRnM,113 | |
click-6.7.dist-info/metadata.json,sha256=qg0uO6amNHkIkOxnmWX7Xa_DNQMQ62Q6drivuP9Gh1c,571 | |
click-6.7.dist-info/top_level.txt,sha256=J1ZQogalYS4pphY_lPECoNMfw0HzTSrZglC4Yfwo4xA,6 | |
click-6.7.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 | |
click/__pycache__/core.cpython-37.pyc,, | |
click/__pycache__/decorators.cpython-37.pyc,, | |
click/__pycache__/exceptions.cpython-37.pyc,, | |
click/__pycache__/formatting.cpython-37.pyc,, | |
click/__pycache__/globals.cpython-37.pyc,, | |
click/__pycache__/parser.cpython-37.pyc,, | |
click/__pycache__/termui.cpython-37.pyc,, | |
click/__pycache__/testing.cpython-37.pyc,, | |
click/__pycache__/types.cpython-37.pyc,, | |
click/__pycache__/utils.cpython-37.pyc,, | |
click/__pycache__/_bashcomplete.cpython-37.pyc,, | |
click/__pycache__/_compat.cpython-37.pyc,, | |
click/__pycache__/_termui_impl.cpython-37.pyc,, | |
click/__pycache__/_textwrap.cpython-37.pyc,, | |
click/__pycache__/_unicodefun.cpython-37.pyc,, | |
click/__pycache__/_winconsole.cpython-37.pyc,, | |
click/__pycache__/__init__.cpython-37.pyc,, |
click |
Wheel-Version: 1.0 | |
Generator: bdist_wheel (0.30.0.a0) | |
Root-Is-Purelib: true | |
Tag: py2-none-any | |
Tag: py3-none-any | |
# -*- coding: utf-8 -*-
"""
    click
    ~~~~~

    Click is a simple Python module that wraps the stdlib's optparse to make
    writing command line scripts fun.  Unlike other modules, it's based around
    a simple API that does not come with too much magic and is composable.

    In case optparse ever gets removed from the stdlib, it will be shipped by
    this module.

    :copyright: (c) 2014 by Armin Ronacher.
    :license: BSD, see LICENSE for more details.
"""

# Core classes
from .core import Context, BaseCommand, Command, MultiCommand, Group, \
    CommandCollection, Parameter, Option, Argument

# Globals
from .globals import get_current_context

# Decorators
from .decorators import pass_context, pass_obj, make_pass_decorator, \
    command, group, argument, option, confirmation_option, \
    password_option, version_option, help_option

# Types
from .types import ParamType, File, Path, Choice, IntRange, Tuple, \
    STRING, INT, FLOAT, BOOL, UUID, UNPROCESSED

# Utilities
from .utils import echo, get_binary_stream, get_text_stream, open_file, \
    format_filename, get_app_dir, get_os_args

# Terminal functions
from .termui import prompt, confirm, get_terminal_size, echo_via_pager, \
    progressbar, clear, style, unstyle, secho, edit, launch, getchar, \
    pause

# Exceptions
from .exceptions import ClickException, UsageError, BadParameter, \
    FileError, Abort, NoSuchOption, BadOptionUsage, BadArgumentUsage, \
    MissingParameter

# Formatting
from .formatting import HelpFormatter, wrap_text

# Parsing
from .parser import OptionParser


# Public API: everything re-exported above, grouped like the imports.
__all__ = [
    # Core classes
    'Context', 'BaseCommand', 'Command', 'MultiCommand', 'Group',
    'CommandCollection', 'Parameter', 'Option', 'Argument',

    # Globals
    'get_current_context',

    # Decorators
    'pass_context', 'pass_obj', 'make_pass_decorator', 'command', 'group',
    'argument', 'option', 'confirmation_option', 'password_option',
    'version_option', 'help_option',

    # Types
    'ParamType', 'File', 'Path', 'Choice', 'IntRange', 'Tuple', 'STRING',
    'INT', 'FLOAT', 'BOOL', 'UUID', 'UNPROCESSED',

    # Utilities
    'echo', 'get_binary_stream', 'get_text_stream', 'open_file',
    'format_filename', 'get_app_dir', 'get_os_args',

    # Terminal functions
    'prompt', 'confirm', 'get_terminal_size', 'echo_via_pager',
    'progressbar', 'clear', 'style', 'unstyle', 'secho', 'edit', 'launch',
    'getchar', 'pause',

    # Exceptions
    'ClickException', 'UsageError', 'BadParameter', 'FileError',
    'Abort', 'NoSuchOption', 'BadOptionUsage', 'BadArgumentUsage',
    'MissingParameter',

    # Formatting
    'HelpFormatter', 'wrap_text',

    # Parsing
    'OptionParser',
]


# Controls if click should emit the warning about the use of unicode
# literals.
disable_unicode_literals_warning = False


__version__ = '6.7'
import os | |
import re | |
from .utils import echo | |
from .parser import split_arg_string | |
from .core import MultiCommand, Option | |
# Bash snippet emitted by `eval "$(_PROG_COMPLETE=source prog)"`.  The
# %(...)s placeholders are filled in by get_completion_script() below.
COMPLETION_SCRIPT = '''
%(complete_func)s() {
    COMPREPLY=( $( env COMP_WORDS="${COMP_WORDS[*]}" \\
                   COMP_CWORD=$COMP_CWORD \\
                   %(autocomplete_var)s=complete $1 ) )
    return 0
}

complete -F %(complete_func)s -o default %(script_names)s
'''

# Characters that are not legal in a shell function identifier.
_invalid_ident_char_re = re.compile(r'[^a-zA-Z0-9_]')


def get_completion_script(prog_name, complete_var):
    """Render the bash completion activation script for *prog_name*.

    The program name is sanitized into a valid shell identifier (dashes
    become underscores, anything else illegal is dropped) before being
    used as the completion function name.
    """
    func_name = '_%s_completion' % _invalid_ident_char_re.sub(
        '', prog_name.replace('-', '_'))
    rendered = COMPLETION_SCRIPT % {
        'complete_func': func_name,
        'script_names': prog_name,
        'autocomplete_var': complete_var,
    }
    return rendered.strip() + ';'
def resolve_ctx(cli, prog_name, args):
    """Walk the chain of (multi)commands named in ``args`` and return the
    innermost :class:`Context`, or ``None`` when a subcommand name does
    not resolve.

    ``resilient_parsing`` keeps the parser from prompting, validating or
    exiting -- we only need the context structure for completion.
    """
    ctx = cli.make_context(prog_name, args, resilient_parsing=True)
    # Descend while arguments remain and the current command can dispatch
    # to subcommands.
    while ctx.protected_args + ctx.args and isinstance(ctx.command, MultiCommand):
        a = ctx.protected_args + ctx.args
        cmd = ctx.command.get_command(ctx, a[0])
        if cmd is None:
            return None
        ctx = cmd.make_context(a[0], a[1:], parent=ctx, resilient_parsing=True)
    return ctx
def get_choices(cli, prog_name, args, incomplete):
    """Yield completion candidates for the partially typed ``incomplete``.

    Option names are offered when the fragment starts with a
    non-alphanumeric character (e.g. ``-``); otherwise subcommand names
    are offered when the current command is a multi-command.
    """
    ctx = resolve_ctx(cli, prog_name, args)
    if ctx is None:
        return
    choices = []
    if incomplete and not incomplete[:1].isalnum():
        # Fragment looks like the start of an option -> offer option names.
        for param in ctx.command.params:
            if not isinstance(param, Option):
                continue
            choices.extend(param.opts)
            choices.extend(param.secondary_opts)
    elif isinstance(ctx.command, MultiCommand):
        choices.extend(ctx.command.list_commands(ctx))
    for item in choices:
        if item.startswith(incomplete):
            yield item
def do_complete(cli, prog_name):
    """Perform one completion pass driven by the ``COMP_WORDS`` /
    ``COMP_CWORD`` environment variables exported by bash, printing one
    candidate per line.  Always returns ``True``.
    """
    cwords = split_arg_string(os.environ['COMP_WORDS'])
    cword = int(os.environ['COMP_CWORD'])
    # Arguments before the word under the cursor; index 0 is the program
    # name itself and is skipped.
    args = cwords[1:cword]
    try:
        incomplete = cwords[cword]
    except IndexError:
        # Cursor is past the last word -> complete from an empty fragment.
        incomplete = ''
    for item in get_choices(cli, prog_name, args, incomplete):
        echo(item)
    return True
def bashcomplete(cli, prog_name, complete_var, complete_instr):
    """Dispatch on the completion instruction from the environment.

    ``'source'`` prints the activation script, ``'complete'`` runs an
    actual completion pass.  Returns ``True`` when something was handled.
    """
    handled = False
    if complete_instr == 'source':
        echo(get_completion_script(prog_name, complete_var))
        handled = True
    elif complete_instr == 'complete':
        handled = do_complete(cli, prog_name)
    return handled
import re | |
import io | |
import os | |
import sys | |
import codecs | |
from weakref import WeakKeyDictionary | |
PY2 = sys.version_info[0] == 2 | |
WIN = sys.platform.startswith('win') | |
DEFAULT_COLUMNS = 80 | |
_ansi_re = re.compile('\033\[((?:\d|;)*)([a-zA-Z])') | |
def get_filesystem_encoding():
    """Return the file system encoding, falling back to the interpreter's
    default encoding when the platform reports none."""
    fs_enc = sys.getfilesystemencoding()
    return fs_enc if fs_enc else sys.getdefaultencoding()
def _make_text_stream(stream, encoding, errors):
    # Wrap a binary stream in a text wrapper that never closes the
    # underlying stream.  Defaults: the stream's best known encoding, and
    # 'replace' error handling so writes cannot raise on bad characters.
    if encoding is None:
        encoding = get_best_encoding(stream)
    if errors is None:
        errors = 'replace'
    return _NonClosingTextIOWrapper(stream, encoding, errors,
                                    line_buffering=True)
def is_ascii_encoding(encoding):
    """Return True when *encoding* (or any alias of it) resolves to the
    ASCII codec; unknown encodings count as non-ASCII."""
    try:
        codec = codecs.lookup(encoding)
    except LookupError:
        return False
    return codec.name == 'ascii'
def get_best_encoding(stream):
    """Returns the default stream encoding if not found."""
    # Prefer the stream's own encoding, then the interpreter default.  A
    # plain-ASCII answer almost always means a misconfigured environment,
    # so upgrade it to UTF-8.
    rv = getattr(stream, 'encoding', None) or sys.getdefaultencoding()
    if is_ascii_encoding(rv):
        return 'utf-8'
    return rv
class _NonClosingTextIOWrapper(io.TextIOWrapper):
    """A TextIOWrapper that detaches instead of closing on destruction,
    so wrapping e.g. ``sys.stdout`` never closes the real stream when the
    wrapper is garbage collected."""

    def __init__(self, stream, encoding, errors, **extra):
        self._stream = stream = _FixupStream(stream)
        io.TextIOWrapper.__init__(self, stream, encoding, errors, **extra)

    # The io module is a place where the Python 3 text behavior
    # was forced upon Python 2, so we need to unbreak
    # it to look like Python 2.
    if PY2:
        def write(self, x):
            # Bytes (and native str) bypass the text layer and go
            # straight to the buffer, matching Python 2 file semantics.
            if isinstance(x, str) or is_bytes(x):
                try:
                    self.flush()
                except Exception:
                    pass
                return self.buffer.write(str(x))
            return io.TextIOWrapper.write(self, x)

        def writelines(self, lines):
            for line in lines:
                self.write(line)

    def __del__(self):
        # Detach rather than close: the caller still owns the stream.
        try:
            self.detach()
        except Exception:
            pass

    def isatty(self):
        # https://bitbucket.org/pypy/pypy/issue/1803
        return self._stream.isatty()
class _FixupStream(object):
    """The new io interface needs more from streams than streams
    traditionally implement.  As such, this fix-up code is necessary in
    some circumstances.

    Missing ``read1``/``readable``/``writable``/``seekable`` methods are
    emulated by probing the wrapped stream; everything else is proxied.
    """

    def __init__(self, stream):
        self._stream = stream

    def __getattr__(self, name):
        # Delegate anything we don't implement to the real stream.
        return getattr(self._stream, name)

    def read1(self, size):
        f = getattr(self._stream, 'read1', None)
        if f is not None:
            return f(size)
        # We only dispatch to readline instead of read in Python 2 as we
        # do not want cause problems with the different implementation
        # of line buffering.
        if PY2:
            return self._stream.readline(size)
        return self._stream.read(size)

    def readable(self):
        x = getattr(self._stream, 'readable', None)
        if x is not None:
            return x()
        # Probe: a zero-byte read succeeds on readable streams.
        try:
            self._stream.read(0)
        except Exception:
            return False
        return True

    def writable(self):
        x = getattr(self._stream, 'writable', None)
        if x is not None:
            return x()
        # Probe with an empty text write, then an empty bytes write.
        try:
            self._stream.write('')
        except Exception:
            try:
                self._stream.write(b'')
            except Exception:
                return False
        return True

    def seekable(self):
        x = getattr(self._stream, 'seekable', None)
        if x is not None:
            return x()
        # Probe: seeking to the current position is a no-op if supported.
        try:
            self._stream.seek(self._stream.tell())
        except Exception:
            return False
        return True
# Version-specific aliases and stream helpers.  The two branches define
# the same public names (text_type, is_bytes, get_binary_std*/get_text_std*,
# filename_to_ui, ...) with Python-2 and Python-3 implementations.
if PY2:
    text_type = unicode
    bytes = str
    raw_input = raw_input
    string_types = (str, unicode)
    iteritems = lambda x: x.iteritems()
    range_type = xrange

    def is_bytes(x):
        # On Python 2 `str` is bytes already; only the byte-ish buffer
        # types need special-casing.
        return isinstance(x, (buffer, bytearray))

    _identifier_re = re.compile(r'^[a-zA-Z_][a-zA-Z0-9_]*$')

    # For Windows, we need to force stdout/stdin/stderr to binary if it's
    # fetched for that. This obviously is not the most correct way to do
    # it as it changes global state. Unfortunately, there does not seem to
    # be a clear better way to do it as just reopening the file in binary
    # mode does not change anything.
    #
    # An option would be to do what Python 3 does and to open the file as
    # binary only, patch it back to the system, and then use a wrapper
    # stream that converts newlines. It's not quite clear what's the
    # correct option here.
    #
    # This code also lives in _winconsole for the fallback to the console
    # emulation stream.
    #
    # There are also Windows environments where the `msvcrt` module is not
    # available (which is why we use try-catch instead of the WIN variable
    # here), such as the Google App Engine development server on Windows. In
    # those cases there is just nothing we can do.
    try:
        import msvcrt
    except ImportError:
        set_binary_mode = lambda x: x
    else:
        def set_binary_mode(f):
            try:
                fileno = f.fileno()
            except Exception:
                pass
            else:
                msvcrt.setmode(fileno, os.O_BINARY)
            return f

    def isidentifier(x):
        return _identifier_re.search(x) is not None

    def get_binary_stdin():
        return set_binary_mode(sys.stdin)

    def get_binary_stdout():
        return set_binary_mode(sys.stdout)

    def get_binary_stderr():
        return set_binary_mode(sys.stderr)

    def get_text_stdin(encoding=None, errors=None):
        # A Windows console stream takes precedence over a plain wrapper.
        rv = _get_windows_console_stream(sys.stdin, encoding, errors)
        if rv is not None:
            return rv
        return _make_text_stream(sys.stdin, encoding, errors)

    def get_text_stdout(encoding=None, errors=None):
        rv = _get_windows_console_stream(sys.stdout, encoding, errors)
        if rv is not None:
            return rv
        return _make_text_stream(sys.stdout, encoding, errors)

    def get_text_stderr(encoding=None, errors=None):
        rv = _get_windows_console_stream(sys.stderr, encoding, errors)
        if rv is not None:
            return rv
        return _make_text_stream(sys.stderr, encoding, errors)

    def filename_to_ui(value):
        if isinstance(value, bytes):
            value = value.decode(get_filesystem_encoding(), 'replace')
        return value
else:
    import io
    text_type = str
    raw_input = input
    string_types = (str,)
    range_type = range
    isidentifier = lambda x: x.isidentifier()
    iteritems = lambda x: iter(x.items())

    def is_bytes(x):
        return isinstance(x, (bytes, memoryview, bytearray))

    def _is_binary_reader(stream, default=False):
        # A zero-byte read tells us the stream's data type without
        # consuming anything.  Failure (e.g. the stream was already
        # closed) falls back to the supplied default.
        try:
            return isinstance(stream.read(0), bytes)
        except Exception:
            return default

    def _is_binary_writer(stream, default=False):
        # Mirror of _is_binary_reader using empty writes as the probe.
        try:
            stream.write(b'')
        except Exception:
            try:
                stream.write('')
                return False
            except Exception:
                pass
            return default
        return True

    def _find_binary_reader(stream):
        # We need to figure out if the given stream is already binary.
        # This can happen because the official docs recommend detaching
        # the streams to get binary streams. Some code might do this, so
        # we need to deal with this case explicitly.
        if _is_binary_reader(stream, False):
            return stream

        buf = getattr(stream, 'buffer', None)
        # Same situation here; this time we assume that the buffer is
        # actually binary in case it's closed.
        if buf is not None and _is_binary_reader(buf, True):
            return buf

    def _find_binary_writer(stream):
        # We need to figure out if the given stream is already binary.
        # This can happen because the official docs recommend detatching
        # the streams to get binary streams. Some code might do this, so
        # we need to deal with this case explicitly.
        if _is_binary_writer(stream, False):
            return stream

        buf = getattr(stream, 'buffer', None)
        # Same situation here; this time we assume that the buffer is
        # actually binary in case it's closed.
        if buf is not None and _is_binary_writer(buf, True):
            return buf

    def _stream_is_misconfigured(stream):
        """A stream is misconfigured if its encoding is ASCII."""
        # If the stream does not have an encoding set, we assume it's set
        # to ASCII. This appears to happen in certain unittest
        # environments. It's not quite clear what the correct behavior is
        # but this at least will force Click to recover somehow.
        return is_ascii_encoding(getattr(stream, 'encoding', None) or 'ascii')

    def _is_compatible_text_stream(stream, encoding, errors):
        stream_encoding = getattr(stream, 'encoding', None)
        stream_errors = getattr(stream, 'errors', None)

        # Perfect match.
        if stream_encoding == encoding and stream_errors == errors:
            return True

        # Otherwise, it's only a compatible stream if we did not ask for
        # an encoding.
        if encoding is None:
            return stream_encoding is not None

        return False

    def _force_correct_text_reader(text_reader, encoding, errors):
        if _is_binary_reader(text_reader, False):
            binary_reader = text_reader
        else:
            # If there is no target encoding set, we need to verify that the
            # reader is not actually misconfigured.
            if encoding is None and not _stream_is_misconfigured(text_reader):
                return text_reader

            if _is_compatible_text_stream(text_reader, encoding, errors):
                return text_reader

            # If the reader has no encoding, we try to find the underlying
            # binary reader for it. If that fails because the environment is
            # misconfigured, we silently go with the same reader because this
            # is too common to happen. In that case, mojibake is better than
            # exceptions.
            binary_reader = _find_binary_reader(text_reader)
            if binary_reader is None:
                return text_reader

        # At this point, we default the errors to replace instead of strict
        # because nobody handles those errors anyways and at this point
        # we're so fundamentally fucked that nothing can repair it.
        if errors is None:
            errors = 'replace'
        return _make_text_stream(binary_reader, encoding, errors)

    def _force_correct_text_writer(text_writer, encoding, errors):
        if _is_binary_writer(text_writer, False):
            binary_writer = text_writer
        else:
            # If there is no target encoding set, we need to verify that the
            # writer is not actually misconfigured.
            if encoding is None and not _stream_is_misconfigured(text_writer):
                return text_writer

            if _is_compatible_text_stream(text_writer, encoding, errors):
                return text_writer

            # If the writer has no encoding, we try to find the underlying
            # binary writer for it. If that fails because the environment is
            # misconfigured, we silently go with the same writer because this
            # is too common to happen. In that case, mojibake is better than
            # exceptions.
            binary_writer = _find_binary_writer(text_writer)
            if binary_writer is None:
                return text_writer

        # At this point, we default the errors to replace instead of strict
        # because nobody handles those errors anyways and at this point
        # we're so fundamentally fucked that nothing can repair it.
        if errors is None:
            errors = 'replace'
        return _make_text_stream(binary_writer, encoding, errors)

    def get_binary_stdin():
        reader = _find_binary_reader(sys.stdin)
        if reader is None:
            raise RuntimeError('Was not able to determine binary '
                               'stream for sys.stdin.')
        return reader

    def get_binary_stdout():
        writer = _find_binary_writer(sys.stdout)
        if writer is None:
            raise RuntimeError('Was not able to determine binary '
                               'stream for sys.stdout.')
        return writer

    def get_binary_stderr():
        writer = _find_binary_writer(sys.stderr)
        if writer is None:
            raise RuntimeError('Was not able to determine binary '
                               'stream for sys.stderr.')
        return writer

    def get_text_stdin(encoding=None, errors=None):
        rv = _get_windows_console_stream(sys.stdin, encoding, errors)
        if rv is not None:
            return rv
        return _force_correct_text_reader(sys.stdin, encoding, errors)

    def get_text_stdout(encoding=None, errors=None):
        rv = _get_windows_console_stream(sys.stdout, encoding, errors)
        if rv is not None:
            return rv
        return _force_correct_text_writer(sys.stdout, encoding, errors)

    def get_text_stderr(encoding=None, errors=None):
        rv = _get_windows_console_stream(sys.stderr, encoding, errors)
        if rv is not None:
            return rv
        return _force_correct_text_writer(sys.stderr, encoding, errors)

    def filename_to_ui(value):
        # Round-trip through surrogateescape so undecodable filename bytes
        # render as replacement characters instead of raising.
        if isinstance(value, bytes):
            value = value.decode(get_filesystem_encoding(), 'replace')
        else:
            value = value.encode('utf-8', 'surrogateescape') \
                .decode('utf-8', 'replace')
        return value
def get_streerror(e, default=None):
    """Best-effort extraction of a readable message from an exception:
    prefer ``strerror`` (OSError and friends), then the caller-supplied
    *default*, then ``str(e)``.  Always returns text, never bytes.
    """
    if hasattr(e, 'strerror'):
        msg = e.strerror
    elif default is not None:
        msg = default
    else:
        msg = str(e)
    if isinstance(msg, bytes):
        msg = msg.decode('utf-8', 'replace')
    return msg
def open_stream(filename, mode='r', encoding=None, errors='strict',
                atomic=False):
    """Open ``filename`` and return ``(stream, should_close)``.

    ``'-'`` maps to stdin/stdout (text or binary depending on ``mode``)
    and must not be closed by the caller.  With ``atomic=True`` data is
    written to a temporary file in the target's directory and moved over
    the target on close.
    """
    # Standard streams first. These are simple because they don't need
    # special handling for the atomic flag. It's entirely ignored.
    if filename == '-':
        if 'w' in mode:
            if 'b' in mode:
                return get_binary_stdout(), False
            return get_text_stdout(encoding=encoding, errors=errors), False
        if 'b' in mode:
            return get_binary_stdin(), False
        return get_text_stdin(encoding=encoding, errors=errors), False

    # Non-atomic writes directly go out through the regular open functions.
    if not atomic:
        if encoding is None:
            return open(filename, mode), True
        return io.open(filename, mode, encoding=encoding, errors=errors), True

    # Some usability stuff for atomic writes
    if 'a' in mode:
        raise ValueError(
            'Appending to an existing file is not supported, because that '
            'would involve an expensive `copy`-operation to a temporary '
            'file. Open the file in normal `w`-mode and copy explicitly '
            'if that\'s what you\'re after.'
        )
    if 'x' in mode:
        raise ValueError('Use the `overwrite`-parameter instead.')
    if 'w' not in mode:
        raise ValueError('Atomic writes only make sense with `w`-mode.')

    # Atomic writes are more complicated. They work by opening a file
    # as a proxy in the same folder and then using the fdopen
    # functionality to wrap it in a Python file. Then we wrap it in an
    # atomic file that moves the file over on close.
    import tempfile
    fd, tmp_filename = tempfile.mkstemp(dir=os.path.dirname(filename),
                                        prefix='.__atomic-write')
    if encoding is not None:
        f = io.open(fd, mode, encoding=encoding, errors=errors)
    else:
        f = os.fdopen(fd, mode)
    return _AtomicFile(f, tmp_filename, filename), True
# Used in a destructor call, needs extra protection from interpreter cleanup.
if hasattr(os, 'replace'):
    # os.replace (Python 3.3+) overwrites the destination atomically,
    # even on Windows.
    _replace = os.replace
    _can_replace = True
else:
    # os.rename cannot overwrite an existing file on Windows, so callers
    # must remove the destination first (see _AtomicFile.close).
    _replace = os.rename
    _can_replace = not WIN
class _AtomicFile(object):
    """Proxy for a temporary file that replaces the real target path when
    closed, so readers never observe a half-written file."""

    def __init__(self, f, tmp_filename, real_filename):
        self._f = f
        self._tmp_filename = tmp_filename
        self._real_filename = real_filename
        self.closed = False

    @property
    def name(self):
        # Report the destination name, not the temporary one.
        return self._real_filename

    def close(self, delete=False):
        # NOTE(review): `delete` is accepted but never acted upon -- the
        # temp file is always moved over the target even when __exit__
        # passes delete=True on error; confirm whether that is intended.
        if self.closed:
            return
        self._f.close()
        if not _can_replace:
            # Platforms where rename cannot overwrite: best-effort removal
            # of the destination first.
            try:
                os.remove(self._real_filename)
            except OSError:
                pass
        _replace(self._tmp_filename, self._real_filename)
        self.closed = True

    def __getattr__(self, name):
        # Everything else behaves like the underlying file object.
        return getattr(self._f, name)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, tb):
        self.close(delete=exc_type is not None)

    def __repr__(self):
        return repr(self._f)
# Filled in further below when running on Windows with colorama installed.
auto_wrap_for_ansi = None
colorama = None
get_winterm_size = None


def strip_ansi(value):
    # Remove every ANSI escape sequence from the string.
    return _ansi_re.sub('', value)


def should_strip_ansi(stream=None, color=None):
    # Without an explicit color preference, strip unless the stream is an
    # interactive terminal.
    # NOTE(review): the fallback probes sys.stdin rather than the stream
    # being written to -- confirm before "fixing", this mirrors the code
    # as written.
    if color is None:
        if stream is None:
            stream = sys.stdin
        return not isatty(stream)
    return not color
# If we're on Windows, we provide transparent integration through
# colorama. This will make ANSI colors through the echo function
# work automatically.
if WIN:
    # Windows has a smaller terminal
    DEFAULT_COLUMNS = 79

    from ._winconsole import _get_windows_console_stream

    def _get_argv_encoding():
        import locale
        return locale.getpreferredencoding()

    if PY2:
        def raw_input(prompt=''):
            # Write the prompt through our wrapped stdout so encodings
            # behave, then read one line without the trailing newline.
            sys.stderr.flush()
            if prompt:
                stdout = _default_text_stdout()
                stdout.write(prompt)
            stdin = _default_text_stdin()
            return stdin.readline().rstrip('\r\n')

    try:
        import colorama
    except ImportError:
        pass
    else:
        _ansi_stream_wrappers = WeakKeyDictionary()

        def auto_wrap_for_ansi(stream, color=None):
            """This function wraps a stream so that calls through colorama
            are issued to the win32 console API to recolor on demand. It
            also ensures to reset the colors if a write call is interrupted
            to not destroy the console afterwards.
            """
            try:
                cached = _ansi_stream_wrappers.get(stream)
            except Exception:
                cached = None
            if cached is not None:
                return cached
            strip = should_strip_ansi(stream, color)
            ansi_wrapper = colorama.AnsiToWin32(stream, strip=strip)
            rv = ansi_wrapper.stream
            _write = rv.write

            def _safe_write(s):
                try:
                    return _write(s)
                except:
                    # Reset colors so an interrupted write cannot leave
                    # the console stuck in a colored state.
                    ansi_wrapper.reset_all()
                    raise

            rv.write = _safe_write
            try:
                _ansi_stream_wrappers[stream] = rv
            except Exception:
                pass
            return rv

        def get_winterm_size():
            win = colorama.win32.GetConsoleScreenBufferInfo(
                colorama.win32.STDOUT).srWindow
            return win.Right - win.Left, win.Bottom - win.Top
else:
    def _get_argv_encoding():
        return getattr(sys.stdin, 'encoding', None) or get_filesystem_encoding()

    _get_windows_console_stream = lambda *x: None
def term_len(x):
    # Visible width of a string on the terminal: ANSI sequences are
    # zero-width, so strip them before measuring.
    return len(strip_ansi(x))
def isatty(stream):
    """Safely report whether *stream* is attached to a terminal; any
    failure (closed stream, missing method, ...) counts as not a tty."""
    try:
        result = stream.isatty()
    except Exception:
        result = False
    return result
def _make_cached_stream_func(src_func, wrapper_func): | |
cache = WeakKeyDictionary() | |
def func(): | |
stream = src_func() | |
try: | |
rv = cache.get(stream) | |
except Exception: | |
rv = None | |
if rv is not None: | |
return rv | |
rv = wrapper_func() | |
try: | |
cache[stream] = rv | |
except Exception: | |
pass | |
return rv | |
return func | |
# Cached default text streams; re-wrapped lazily if sys.std* is replaced.
_default_text_stdin = _make_cached_stream_func(
    lambda: sys.stdin, get_text_stdin)
_default_text_stdout = _make_cached_stream_func(
    lambda: sys.stdout, get_text_stdout)
_default_text_stderr = _make_cached_stream_func(
    lambda: sys.stderr, get_text_stderr)


# Lookup tables mapping stream names to their binary / text accessors.
binary_streams = {
    'stdin': get_binary_stdin,
    'stdout': get_binary_stdout,
    'stderr': get_binary_stderr,
}

text_streams = {
    'stdin': get_text_stdin,
    'stdout': get_text_stdout,
    'stderr': get_text_stderr,
}
""" | |
click._termui_impl | |
~~~~~~~~~~~~~~~~~~ | |
This module contains implementations for the termui module. To keep the | |
import time of Click down, some infrequently used functionality is placed | |
in this module and only imported as needed. | |
:copyright: (c) 2014 by Armin Ronacher. | |
:license: BSD, see LICENSE for more details. | |
""" | |
import os | |
import sys | |
import time | |
import math | |
from ._compat import _default_text_stdout, range_type, PY2, isatty, \ | |
open_stream, strip_ansi, term_len, get_best_encoding, WIN | |
from .utils import echo | |
from .exceptions import ClickException | |
if os.name == 'nt':
    # Windows console: no cursor-visibility escape codes.
    BEFORE_BAR = '\r'
    AFTER_BAR = '\n'
else:
    # Hide the cursor while redrawing the bar, show it again afterwards.
    BEFORE_BAR = '\r\033[?25l'
    AFTER_BAR = '\033[?25h\n'
def _length_hint(obj): | |
"""Returns the length hint of an object.""" | |
try: | |
return len(obj) | |
except (AttributeError, TypeError): | |
try: | |
get_hint = type(obj).__length_hint__ | |
except AttributeError: | |
return None | |
try: | |
hint = get_hint(obj) | |
except TypeError: | |
return None | |
if hint is NotImplemented or \ | |
not isinstance(hint, (int, long)) or \ | |
hint < 0: | |
return None | |
return hint | |
class ProgressBar(object):
    """Textual progress bar implementation.

    Iterates over ``iterable`` (or ``range_type(length)``) while rendering
    a single updating progress line to ``file``.  Must be used as a context
    manager; iterating outside a ``with`` block raises ``RuntimeError``.
    When the output is not a terminal, only the label is printed.
    """

    def __init__(self, iterable, length=None, fill_char='#', empty_char=' ',
                 bar_template='%(bar)s', info_sep='  ', show_eta=True,
                 show_percent=None, show_pos=False, item_show_func=None,
                 label=None, file=None, color=None, width=30):
        self.fill_char = fill_char
        self.empty_char = empty_char
        self.bar_template = bar_template
        self.info_sep = info_sep
        self.show_eta = show_eta
        # None means "auto": show percent unless positions are shown.
        self.show_percent = show_percent
        self.show_pos = show_pos
        # Optional callable rendering the current item next to the bar.
        self.item_show_func = item_show_func
        self.label = label or ''
        if file is None:
            file = _default_text_stdout()
        self.file = file
        self.color = color
        self.width = width
        # width == 0 means "size to the terminal"; recomputed per render.
        self.autowidth = width == 0

        if length is None:
            # Try to guess the total from the iterable itself.
            length = _length_hint(iterable)
        if iterable is None:
            if length is None:
                raise TypeError('iterable or length is required')
            iterable = range_type(length)
        self.iter = iter(iterable)
        self.length = length
        self.length_known = length is not None
        self.pos = 0
        # Sliding window of time-per-item samples feeding the ETA.
        self.avg = []
        self.start = self.last_eta = time.time()
        self.eta_known = False
        self.finished = False
        self.max_width = None
        self.entered = False
        self.current_item = None
        # Non-tty output suppresses the animated bar entirely.
        self.is_hidden = not isatty(self.file)
        self._last_line = None

    def __enter__(self):
        self.entered = True
        self.render_progress()
        return self

    def __exit__(self, exc_type, exc_value, tb):
        self.render_finish()

    def __iter__(self):
        if not self.entered:
            raise RuntimeError('You need to use progress bars in a with block.')
        self.render_progress()
        return self

    def render_finish(self):
        """Emit the trailing control sequence (re-shows the cursor)."""
        if self.is_hidden:
            return
        self.file.write(AFTER_BAR)
        self.file.flush()

    @property
    def pct(self):
        """Completion ratio in [0.0, 1.0]."""
        if self.finished:
            return 1.0
        return min(self.pos / (float(self.length) or 1), 1.0)

    @property
    def time_per_iteration(self):
        """Mean of the recorded per-item durations (0.0 with no samples)."""
        if not self.avg:
            return 0.0
        return sum(self.avg) / float(len(self.avg))

    @property
    def eta(self):
        """Estimated seconds remaining (0.0 when unknown or finished)."""
        if self.length_known and not self.finished:
            return self.time_per_iteration * (self.length - self.pos)
        return 0.0

    def format_eta(self):
        """Format the ETA as ``HH:MM:SS`` (prefixed with ``Nd`` when it
        exceeds a day), or '' when the ETA is unknown."""
        if self.eta_known:
            # Integer arithmetic is required here: with true division
            # (as the original ``t /= 60`` did on Python 3) every field
            # keeps a fractional remainder and ``t > 0`` is true for any
            # positive ETA, so the day prefix was always shown.
            t = int(self.eta)
            seconds = t % 60
            t //= 60
            minutes = t % 60
            t //= 60
            hours = t % 24
            t //= 24
            if t > 0:
                days = t
                return '%dd %02d:%02d:%02d' % (days, hours, minutes, seconds)
            else:
                return '%02d:%02d:%02d' % (hours, minutes, seconds)
        return ''

    def format_pos(self):
        """Format the position as ``pos`` or ``pos/length``."""
        pos = str(self.pos)
        if self.length_known:
            pos += '/%s' % self.length
        return pos

    def format_pct(self):
        """Format the percentage, right-aligned to three characters."""
        return ('% 4d%%' % int(self.pct * 100))[1:]

    def format_progress_line(self):
        """Build the full progress line (label, bar and info segments)."""
        show_percent = self.show_percent

        info_bits = []
        if self.length_known:
            bar_length = int(self.pct * self.width)
            bar = self.fill_char * bar_length
            bar += self.empty_char * (self.width - bar_length)
            if show_percent is None:
                show_percent = not self.show_pos
        else:
            if self.finished:
                bar = self.fill_char * self.width
            else:
                # Unknown length: animate a single oscillating marker.
                bar = list(self.empty_char * (self.width or 1))
                if self.time_per_iteration != 0:
                    bar[int((math.cos(self.pos * self.time_per_iteration)
                             / 2.0 + 0.5) * self.width)] = self.fill_char
                bar = ''.join(bar)

        if self.show_pos:
            info_bits.append(self.format_pos())
        if show_percent:
            info_bits.append(self.format_pct())
        if self.show_eta and self.eta_known and not self.finished:
            info_bits.append(self.format_eta())
        if self.item_show_func is not None:
            item_info = self.item_show_func(self.current_item)
            if item_info is not None:
                info_bits.append(item_info)

        return (self.bar_template % {
            'label': self.label,
            'bar': bar,
            'info': self.info_sep.join(info_bits)
        }).rstrip()

    def render_progress(self):
        """Redraw the bar, but only when the rendered line changed."""
        from .termui import get_terminal_size
        nl = False

        if self.is_hidden:
            # Not a terminal: just (re)print the label with a newline.
            buf = [self.label]
            nl = True
        else:
            buf = []
            # Update width in case the terminal has been resized.
            if self.autowidth:
                old_width = self.width
                self.width = 0
                clutter_length = term_len(self.format_progress_line())
                new_width = max(0, get_terminal_size()[0] - clutter_length)
                if new_width < old_width:
                    # Terminal shrank: blank the old, wider line first.
                    buf.append(BEFORE_BAR)
                    buf.append(' ' * self.max_width)
                    self.max_width = new_width
                self.width = new_width

            clear_width = self.width
            if self.max_width is not None:
                clear_width = self.max_width

            buf.append(BEFORE_BAR)
            line = self.format_progress_line()
            line_len = term_len(line)
            if self.max_width is None or self.max_width < line_len:
                self.max_width = line_len
            buf.append(line)

            # Pad with spaces so leftovers of a longer line are erased.
            buf.append(' ' * (clear_width - line_len))
        line = ''.join(buf)

        # Render the line only if it changed.
        if line != self._last_line:
            self._last_line = line
            echo(line, file=self.file, color=self.color, nl=nl)
            self.file.flush()

    def make_step(self, n_steps):
        """Advance the position and, at most once per second, record a
        new time-per-item sample for the ETA estimate."""
        self.pos += n_steps
        if self.length_known and self.pos >= self.length:
            self.finished = True

        if (time.time() - self.last_eta) < 1.0:
            return

        self.last_eta = time.time()
        # Keep a window of the last seven samples of average item time.
        self.avg = self.avg[-6:] + [-(self.start - time.time()) / (self.pos)]
        self.eta_known = self.length_known

    def update(self, n_steps):
        """Advance by ``n_steps`` and redraw."""
        self.make_step(n_steps)
        self.render_progress()

    def finish(self):
        """Mark the bar as complete and clear the current item."""
        self.eta_known = 0
        self.current_item = None
        self.finished = True

    def next(self):
        """Fetch the next item, advancing and re-rendering the bar."""
        if self.is_hidden:
            return next(self.iter)
        try:
            rv = next(self.iter)
            self.current_item = rv
        except StopIteration:
            self.finish()
            self.render_progress()
            raise StopIteration()
        else:
            self.update(1)
            return rv

    # On Python 3 the iterator protocol uses __next__; drop the Python 2
    # style alias from the class namespace afterwards.
    if not PY2:
        __next__ = next
        del next
def pager(text, color=None):
    """Decide what method to use for paging through text."""
    stdout = _default_text_stdout()
    # Paging only makes sense with a terminal on both ends; otherwise
    # just dump the text.
    if not isatty(sys.stdin) or not isatty(stdout):
        return _nullpager(stdout, text, color)
    # An explicitly configured $PAGER always wins.
    pager_cmd = (os.environ.get('PAGER', None) or '').strip()
    if pager_cmd:
        if WIN:
            return _tempfilepager(text, pager_cmd, color)
        return _pipepager(text, pager_cmd, color)
    # Terminals that cannot drive a full-screen pager.
    if os.environ.get('TERM') in ('dumb', 'emacs'):
        return _nullpager(stdout, text, color)
    if WIN or sys.platform.startswith('os2'):
        return _tempfilepager(text, 'more <', color)
    # Probe for less(1); a zero exit status means it exists.
    if hasattr(os, 'system') and os.system('(less) 2>/dev/null') == 0:
        return _pipepager(text, 'less', color)

    # Last resort: probe more(1) by running it on an empty temp file.
    import tempfile
    fd, filename = tempfile.mkstemp()
    os.close(fd)
    try:
        if hasattr(os, 'system') and os.system('more "%s"' % filename) == 0:
            return _pipepager(text, 'more', color)
        return _nullpager(stdout, text, color)
    finally:
        os.unlink(filename)
def _pipepager(text, cmd, color):
    """Page through text by feeding it to another program.  Invoking a
    pager through this might support colors.
    """
    import subprocess
    env = dict(os.environ)

    # If we're piping to less we might support colors under the
    # condition that less runs with the -r/-R raw-control-character
    # flags (either on its command line or via $LESS).
    cmd_detail = cmd.rsplit('/', 1)[-1].split()
    if color is None and cmd_detail[0] == 'less':
        # NOTE(review): $LESS and the command-line flags are concatenated
        # without a separator; this is fine for the simple substring
        # checks below but worth confirming.
        less_flags = os.environ.get('LESS', '') + ' '.join(cmd_detail[1:])
        if not less_flags:
            env['LESS'] = '-R'
            color = True
        elif 'r' in less_flags or 'R' in less_flags:
            color = True

    if not color:
        text = strip_ansi(text)

    c = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE,
                         env=env)
    encoding = get_best_encoding(c.stdin)
    try:
        c.stdin.write(text.encode(encoding, 'replace'))
        c.stdin.close()
    except (IOError, KeyboardInterrupt):
        # Best effort: the pager may exit before we finish writing.
        pass

    # Less doesn't respect ^C, but catches it for its own UI purposes (aborting
    # search or other commands inside less).
    #
    # That means when the user hits ^C, the parent process (click) terminates,
    # but less is still alive, paging the output and messing up the terminal.
    #
    # If the user wants to make the pager exit on ^C, they should set
    # `LESS='-K'`. It's not our decision to make.
    while True:
        try:
            c.wait()
        except KeyboardInterrupt:
            pass
        else:
            break
def _tempfilepager(text, cmd, color):
    """Page through text by invoking a program on a temporary file.

    :param text: the text to display.
    :param cmd: the pager command line; the quoted file name is appended.
    :param color: when falsy, ANSI sequences are stripped from the text.
    """
    import tempfile
    # mkstemp() creates the file atomically and returns an open fd.  The
    # previous mktemp() only generated a name, leaving a window in which
    # another process could create (or symlink) the path first.
    fd, filename = tempfile.mkstemp()
    if not color:
        text = strip_ansi(text)
    encoding = get_best_encoding(sys.stdout)
    with os.fdopen(fd, 'wb') as f:
        f.write(text.encode(encoding))
    try:
        os.system(cmd + ' "' + filename + '"')
    finally:
        os.unlink(filename)
def _nullpager(stream, text, color): | |
"""Simply print unformatted text. This is the ultimate fallback.""" | |
if not color: | |
text = strip_ansi(text) | |
stream.write(text) | |
class Editor(object):
    """Runs an external text editor, either directly on a file or on a
    chunk of text round-tripped through a temporary file."""

    def __init__(self, editor=None, env=None, require_save=True,
                 extension='.txt'):
        # Explicit editor command; when None, get_editor() resolves one.
        self.editor = editor
        # Optional extra environment variables for the editor process.
        self.env = env
        # When True, edit() returns None unless the file was re-saved.
        self.require_save = require_save
        # Suffix for the temp file so editors can pick an editing mode.
        self.extension = extension

    def get_editor(self):
        """Resolve the editor command: explicit setting, then $VISUAL,
        then $EDITOR, then a platform fallback."""
        if self.editor is not None:
            return self.editor
        for key in 'VISUAL', 'EDITOR':
            rv = os.environ.get(key)
            if rv:
                return rv
        if WIN:
            return 'notepad'
        for editor in 'vim', 'nano':
            if os.system('which %s >/dev/null 2>&1' % editor) == 0:
                return editor
        return 'vi'

    def edit_file(self, filename):
        """Open *filename* in the editor and block until it exits.

        :raises ClickException: if the editor cannot be launched or it
            exits with a non-zero status.
        """
        import subprocess
        editor = self.get_editor()
        if self.env:
            environ = os.environ.copy()
            environ.update(self.env)
        else:
            environ = None
        try:
            c = subprocess.Popen('%s "%s"' % (editor, filename),
                                 env=environ, shell=True)
            exit_code = c.wait()
            if exit_code != 0:
                raise ClickException('%s: Editing failed!' % editor)
        except OSError as e:
            raise ClickException('%s: Editing failed: %s' % (editor, e))

    def edit(self, text):
        """Let the user edit *text* via a temporary file and return the
        edited result, or None if ``require_save`` is set and the user
        did not save."""
        import tempfile

        text = text or ''
        if text and not text.endswith('\n'):
            text += '\n'

        fd, name = tempfile.mkstemp(prefix='editor-', suffix=self.extension)
        try:
            if WIN:
                # BOM + CRLF on Windows (what the notepad default expects).
                encoding = 'utf-8-sig'
                text = text.replace('\n', '\r\n')
            else:
                encoding = 'utf-8'
            text = text.encode(encoding)

            f = os.fdopen(fd, 'wb')
            f.write(text)
            f.close()

            # The mtime comparison below detects whether the user saved.
            timestamp = os.path.getmtime(name)

            self.edit_file(name)

            if self.require_save \
               and os.path.getmtime(name) == timestamp:
                return None

            f = open(name, 'rb')
            try:
                rv = f.read()
            finally:
                f.close()
            # Strip a BOM if present and normalize line endings back.
            return rv.decode('utf-8-sig').replace('\r\n', '\n')
        finally:
            os.unlink(name)
def open_url(url, wait=False, locate=False):
    """Open *url* with the platform's default handler.

    :param url: the URL or ``file://`` path to open.
    :param wait: wait for the launched program to finish where possible.
    :param locate: open the containing folder / file manager instead of
                   launching the target itself.
    :return: the exit code of the spawned process where available,
             otherwise 0 on success and 1 on failure.
    """
    import subprocess

    def _unquote_file(url):
        # Percent-decode a file:// URL into a plain filesystem path.
        # The original code did ``import urllib`` in both branches of a
        # try/except and then called ``urllib.unquote``, which only
        # exists on Python 2; import the function from its proper
        # location on either major version instead.
        try:
            from urllib.parse import unquote  # Python 3
        except ImportError:
            from urllib import unquote  # Python 2
        if url.startswith('file://'):
            url = unquote(url[7:])
        return url

    if sys.platform == 'darwin':
        args = ['open']
        if wait:
            args.append('-W')
        if locate:
            args.append('-R')
        args.append(_unquote_file(url))
        null = open('/dev/null', 'w')
        try:
            return subprocess.Popen(args, stderr=null).wait()
        finally:
            null.close()
    elif WIN:
        if locate:
            url = _unquote_file(url)
            args = 'explorer /select,"%s"' % _unquote_file(
                url.replace('"', ''))
        else:
            args = 'start %s "" "%s"' % (
                wait and '/WAIT' or '', url.replace('"', ''))
        return os.system(args)

    try:
        if locate:
            url = os.path.dirname(_unquote_file(url)) or '.'
        else:
            url = _unquote_file(url)
        c = subprocess.Popen(['xdg-open', url])
        if wait:
            return c.wait()
        return 0
    except OSError:
        # xdg-open is unavailable; fall back to the webbrowser module
        # for plain http(s) URLs only.
        if url.startswith(('http://', 'https://')) and not locate and not wait:
            import webbrowser
            webbrowser.open(url)
            return 0
        return 1
def _translate_ch_to_exc(ch): | |
if ch == '\x03': | |
raise KeyboardInterrupt() | |
if ch == '\x04': | |
raise EOFError() | |
# Platform-specific single-key input: ``getchar(echo)`` blocks for one
# key press and returns it, translating ^C/^D into exceptions.
if WIN:
    import msvcrt

    def getchar(echo):
        """Read one raw key via msvcrt, optionally echoing it back."""
        rv = msvcrt.getch()
        if echo:
            msvcrt.putchar(rv)
        # NOTE(review): msvcrt.getch() returns bytes on Python 3, so the
        # str comparisons inside _translate_ch_to_exc never match there;
        # ^C/^D translation effectively works only on Python 2 -- verify.
        _translate_ch_to_exc(rv)
        if PY2:
            # Decode to unicode using the console encoding when known.
            enc = getattr(sys.stdin, 'encoding', None)
            if enc is not None:
                rv = rv.decode(enc, 'replace')
            else:
                rv = rv.decode('cp1252', 'replace')
        return rv
else:
    import tty
    import termios

    def getchar(echo):
        """Read one key press from the controlling tty in raw mode."""
        if not isatty(sys.stdin):
            # stdin is redirected; read the key from the controlling tty.
            f = open('/dev/tty')
            fd = f.fileno()
        else:
            fd = sys.stdin.fileno()
            f = None
        try:
            old_settings = termios.tcgetattr(fd)
            try:
                tty.setraw(fd)
                # Read up to 32 bytes so multi-byte sequences (escape
                # codes, multi-byte encodings) arrive in one chunk.
                ch = os.read(fd, 32)
                if echo and isatty(sys.stdout):
                    # NOTE(review): ``ch`` is bytes; writing it to a
                    # text-mode stdout raises TypeError on Python 3 --
                    # confirm intended behavior.
                    sys.stdout.write(ch)
            finally:
                # Always restore the terminal mode.
                termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
                sys.stdout.flush()
                if f is not None:
                    f.close()
        except termios.error:
            pass
        # NOTE(review): if tcgetattr fails above, ``ch`` is never bound
        # and the next line raises NameError instead of returning -- verify.
        _translate_ch_to_exc(ch)
        return ch.decode(get_best_encoding(sys.stdin), 'replace')
import textwrap | |
from contextlib import contextmanager | |
class TextWrapper(textwrap.TextWrapper):
    """``textwrap.TextWrapper`` variant with click-specific helpers: a
    simplified long-word handler, a temporary extra-indent context
    manager, and per-line indenting without re-wrapping."""

    def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
        # Always claim at least one column so an over-long word still
        # makes forward progress.
        space_left = max(width - cur_len, 1)
        if self.break_long_words:
            chunk = reversed_chunks[-1]
            cur_line.append(chunk[:space_left])
            reversed_chunks[-1] = chunk[space_left:]
        elif not cur_line:
            cur_line.append(reversed_chunks.pop())

    @contextmanager
    def extra_indent(self, indent):
        """Widen both indents by *indent* for the duration of the block."""
        saved = self.initial_indent, self.subsequent_indent
        self.initial_indent += indent
        self.subsequent_indent += indent
        try:
            yield
        finally:
            self.initial_indent, self.subsequent_indent = saved

    def indent_only(self, text):
        """Prefix each line of *text* with the configured indents,
        without performing any wrapping."""
        prefixed = [
            (self.subsequent_indent if idx else self.initial_indent) + line
            for idx, line in enumerate(text.splitlines())
        ]
        return '\n'.join(prefixed)
import os | |
import sys | |
import codecs | |
from ._compat import PY2 | |
# If someone wants to vendor click, we want to ensure the
# correct package is discovered. Ideally we could use a
# relative import here but unfortunately Python does not
# support that. (This strips the last dotted segment of this
# module's name, resolving e.g. 'click._unicodefun' -> 'click'.)
click = sys.modules[__name__.rsplit('.', 1)[0]]
def _find_unicode_literals_frame(): | |
import __future__ | |
frm = sys._getframe(1) | |
idx = 1 | |
while frm is not None: | |
if frm.f_globals.get('__name__', '').startswith('click.'): | |
frm = frm.f_back | |
idx += 1 | |
elif frm.f_code.co_flags & __future__.unicode_literals.compiler_flag: | |
return idx | |
else: | |
break | |
return 0 | |
def _check_for_unicode_literals():
    """On Python 2, warn when the calling code was compiled with the
    ``unicode_literals`` future import (see the warning text for why).
    No-op on Python 3, with -O, or when the warning is disabled via
    ``click.disable_unicode_literals_warning``."""
    if not __debug__:
        return
    if not PY2 or click.disable_unicode_literals_warning:
        return
    bad_frame = _find_unicode_literals_frame()
    if bad_frame <= 0:
        return
    from warnings import warn
    # stacklevel points the warning at the offending caller frame.
    warn(Warning('Click detected the use of the unicode_literals '
                 '__future__ import. This is heavily discouraged '
                 'because it can introduce subtle bugs in your '
                 'code. You should instead use explicit u"" literals '
                 'for your unicode strings. For more information see '
                 'http://click.pocoo.org/python3/'),
         stacklevel=bad_frame)
def _verify_python3_env():
    """Ensures that the environment is good for unicode on Python 3.

    :raises RuntimeError: when Python 3 would use ASCII as the
        preferred encoding, with locale-specific remediation hints
        appended to the message.
    """
    if PY2:
        return
    try:
        import locale
        fs_enc = codecs.lookup(locale.getpreferredencoding()).name
    except Exception:
        fs_enc = 'ascii'
    if fs_enc != 'ascii':
        return

    extra = ''
    if os.name == 'posix':
        import subprocess
        rv = subprocess.Popen(['locale', '-a'], stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE).communicate()[0]
        good_locales = set()
        has_c_utf8 = False

        # Make sure we're operating on text here.
        if isinstance(rv, bytes):
            rv = rv.decode('ascii', 'replace')

        # Loop variable renamed from ``locale`` so it no longer shadows
        # the module imported above.
        for line in rv.splitlines():
            locale_name = line.strip()
            if locale_name.lower().endswith(('.utf-8', '.utf8')):
                good_locales.add(locale_name)
                if locale_name.lower() in ('c.utf8', 'c.utf-8'):
                    has_c_utf8 = True

        extra += '\n\n'
        if not good_locales:
            extra += (
                'Additional information: on this system no suitable UTF-8\n'
                'locales were discovered. This most likely requires resolving\n'
                'by reconfiguring the locale system.'
            )
        elif has_c_utf8:
            extra += (
                'This system supports the C.UTF-8 locale which is recommended.\n'
                'You might be able to resolve your issue by exporting the\n'
                'following environment variables:\n\n'
                '    export LC_ALL=C.UTF-8\n'
                '    export LANG=C.UTF-8'
            )
        else:
            extra += (
                # Typo fix: "locales where discovered" -> "were".
                'This system lists a couple of UTF-8 supporting locales that\n'
                'you can pick from. The following suitable locales were\n'
                'discovered: %s'
            ) % ', '.join(sorted(good_locales))

        bad_locale = None
        for env_locale in os.environ.get('LC_ALL'), os.environ.get('LANG'):
            if env_locale and env_locale.lower().endswith(('.utf-8', '.utf8')):
                bad_locale = env_locale
            if env_locale is not None:
                break
        if bad_locale is not None:
            extra += (
                '\n\nClick discovered that you exported a UTF-8 locale\n'
                'but the locale system could not pick up from it because\n'
                'it does not exist. The exported locale is "%s" but it\n'
                'is not supported'
            ) % bad_locale

    # Message fix: the two literals were previously concatenated without
    # a space, producing ".../python3/for mitigation steps."
    raise RuntimeError('Click will abort further execution because Python 3 '
                       'was configured to use ASCII as encoding for the '
                       'environment. Consult http://click.pocoo.org/python3/ '
                       'for mitigation steps.' + extra)
# -*- coding: utf-8 -*- | |
# This module is based on the excellent work by Adam Bartoš who | |
# provided a lot of what went into the implementation here in | |
# the discussion to issue1602 in the Python bug tracker. | |
# | |
# There are some general differences in regards to how this works | |
# compared to the original patches as we do not need to patch | |
# the entire interpreter but just work in our little world of | |
# echo and prompt.
import io | |
import os | |
import sys | |
import zlib | |
import time | |
import ctypes | |
import msvcrt | |
from click._compat import _NonClosingTextIOWrapper, text_type, PY2 | |
from ctypes import byref, POINTER, c_int, c_char, c_char_p, \ | |
c_void_p, py_object, c_ssize_t, c_ulong, windll, WINFUNCTYPE | |
try:
    # ``ctypes.pythonapi`` only exists on interpreters exposing the
    # CPython C API; when missing, buffer access is disabled below
    # (``get_buffer`` becomes None).
    from ctypes import pythonapi
    PyObject_GetBuffer = pythonapi.PyObject_GetBuffer
    PyBuffer_Release = pythonapi.PyBuffer_Release
except ImportError:
    pythonapi = None
from ctypes.wintypes import LPWSTR, LPCWSTR | |
c_ssize_p = POINTER(c_ssize_t)

# Win32 API entry points used by the console reader/writer below.
kernel32 = windll.kernel32
GetStdHandle = kernel32.GetStdHandle
ReadConsoleW = kernel32.ReadConsoleW
WriteConsoleW = kernel32.WriteConsoleW
GetLastError = kernel32.GetLastError
GetCommandLineW = WINFUNCTYPE(LPWSTR)(
    ('GetCommandLineW', windll.kernel32))
CommandLineToArgvW = WINFUNCTYPE(
    POINTER(LPWSTR), LPCWSTR, POINTER(c_int))(
        ('CommandLineToArgvW', windll.shell32))

# Standard device handles (GetStdHandle's STD_*_HANDLE constants).
STDIN_HANDLE = GetStdHandle(-10)
STDOUT_HANDLE = GetStdHandle(-11)
STDERR_HANDLE = GetStdHandle(-12)

# Flags passed to PyObject_GetBuffer.
PyBUF_SIMPLE = 0
PyBUF_WRITABLE = 1

# Win32 error codes checked by the reader/writer.
ERROR_SUCCESS = 0
ERROR_NOT_ENOUGH_MEMORY = 8
ERROR_OPERATION_ABORTED = 995

STDIN_FILENO = 0
STDOUT_FILENO = 1
STDERR_FILENO = 2

# Ctrl-Z byte, treated as end-of-file on console input.
EOF = b'\x1a'
# Upper bound on the bytes handed to a single WriteConsoleW call.
MAX_BYTES_WRITTEN = 32767
class Py_buffer(ctypes.Structure):
    """ctypes mirror of the interpreter's ``Py_buffer`` struct.

    Field order and types must match the C definition exactly; do not
    reorder.
    """
    _fields_ = [
        ('buf', c_void_p),
        ('obj', py_object),
        ('len', c_ssize_t),
        ('itemsize', c_ssize_t),
        ('readonly', c_int),
        ('ndim', c_int),
        ('format', c_char_p),
        ('shape', c_ssize_p),
        ('strides', c_ssize_p),
        ('suboffsets', c_ssize_p),
        ('internal', c_void_p)
    ]

    # Python 2's Py_buffer carries an extra inline ``smalltable`` member
    # just before ``internal``.
    if PY2:
        _fields_.insert(-1, ('smalltable', c_ssize_t * 2))
# On PyPy we cannot get buffers so our ability to operate here is
# severely limited.
if pythonapi is None:
    get_buffer = None
else:
    def get_buffer(obj, writable=False):
        """Expose *obj*'s memory as a ctypes ``c_char`` array.

        NOTE(review): the Py_buffer view is released (in the finally
        clause) before the returned array is used by callers; this
        relies on *obj* keeping the memory alive for the duration --
        verify against the call sites.
        """
        buf = Py_buffer()
        flags = PyBUF_WRITABLE if writable else PyBUF_SIMPLE
        PyObject_GetBuffer(py_object(obj), byref(buf), flags)

        try:
            buffer_type = c_char * buf.len
            return buffer_type.from_address(buf.buf)
        finally:
            PyBuffer_Release(byref(buf))
class _WindowsConsoleRawIOBase(io.RawIOBase): | |
def __init__(self, handle): | |
self.handle = handle | |
def isatty(self): | |
io.RawIOBase.isatty(self) | |
return True | |
class _WindowsConsoleReader(_WindowsConsoleRawIOBase):
    """Raw stream reading UTF-16-LE bytes from the console via
    ``ReadConsoleW``."""

    def readable(self):
        return True

    def readinto(self, b):
        """Fill *b* with console input; returns the byte count read."""
        # ReadConsoleW operates on UTF-16 code units (2 bytes each), so
        # only even-sized reads can be honoured.
        bytes_to_be_read = len(b)
        if not bytes_to_be_read:
            return 0
        elif bytes_to_be_read % 2:
            raise ValueError('cannot read odd number of bytes from '
                             'UTF-16-LE encoded console')

        buffer = get_buffer(b, writable=True)
        code_units_to_be_read = bytes_to_be_read // 2
        code_units_read = c_ulong()

        rv = ReadConsoleW(self.handle, buffer, code_units_to_be_read,
                          byref(code_units_read), None)
        if GetLastError() == ERROR_OPERATION_ABORTED:
            # wait for KeyboardInterrupt
            time.sleep(0.1)

        if not rv:
            raise OSError('Windows error: %s' % GetLastError())

        # A leading Ctrl-Z byte means end-of-file on console input.
        if buffer[0] == EOF:
            return 0
        return 2 * code_units_read.value
class _WindowsConsoleWriter(_WindowsConsoleRawIOBase):
    """Raw stream writing UTF-16-LE bytes to the console via
    ``WriteConsoleW``."""

    def writable(self):
        return True

    @staticmethod
    def _get_error_message(errno):
        # Symbolic names for the error codes this module checks for.
        if errno == ERROR_SUCCESS:
            return 'ERROR_SUCCESS'
        elif errno == ERROR_NOT_ENOUGH_MEMORY:
            return 'ERROR_NOT_ENOUGH_MEMORY'
        return 'Windows error %s' % errno

    def write(self, b):
        """Write as much of *b* as fits in one call; returns bytes written."""
        bytes_to_be_written = len(b)
        buf = get_buffer(b)
        # Cap each WriteConsoleW call at MAX_BYTES_WRITTEN -- presumably
        # to avoid ERROR_NOT_ENOUGH_MEMORY on large writes (see
        # _get_error_message above); the RawIOBase machinery retries
        # with the remainder.
        code_units_to_be_written = min(bytes_to_be_written,
                                       MAX_BYTES_WRITTEN) // 2
        code_units_written = c_ulong()

        WriteConsoleW(self.handle, buf, code_units_to_be_written,
                      byref(code_units_written), None)
        bytes_written = 2 * code_units_written.value

        if bytes_written == 0 and bytes_to_be_written > 0:
            raise OSError(self._get_error_message(GetLastError()))
        return bytes_written
class ConsoleStream(object):
    """Pairs a text stream with the byte stream beneath it.

    Text writes are dispatched to the text stream and byte writes to
    ``buffer``; every other attribute lookup falls through to the text
    stream.
    """

    def __init__(self, text_stream, byte_stream):
        self._text_stream = text_stream
        self.buffer = byte_stream

    @property
    def name(self):
        return self.buffer.name

    def isatty(self):
        return self.buffer.isatty()

    def write(self, x):
        if isinstance(x, text_type):
            return self._text_stream.write(x)
        # Byte data: flush pending text first so the two layers do not
        # interleave out of order; flushing is best-effort only.
        try:
            self.flush()
        except Exception:
            pass
        return self.buffer.write(x)

    def writelines(self, lines):
        for chunk in lines:
            self.write(chunk)

    def __getattr__(self, name):
        return getattr(self._text_stream, name)

    def __repr__(self):
        return '<ConsoleStream name=%r encoding=%r>' % (self.name, self.encoding)
def _get_text_stdin(buffer_stream):
    """Build a console text stdin decoding UTF-16-LE straight from the
    console API, exposing *buffer_stream* as its ``.buffer``."""
    text_stream = _NonClosingTextIOWrapper(
        io.BufferedReader(_WindowsConsoleReader(STDIN_HANDLE)),
        'utf-16-le', 'strict', line_buffering=True)
    return ConsoleStream(text_stream, buffer_stream)
def _get_text_stdout(buffer_stream):
    """Build a console text stdout writing UTF-16-LE via the console
    API, exposing *buffer_stream* as its ``.buffer``."""
    text_stream = _NonClosingTextIOWrapper(
        _WindowsConsoleWriter(STDOUT_HANDLE),
        'utf-16-le', 'strict', line_buffering=True)
    return ConsoleStream(text_stream, buffer_stream)
def _get_text_stderr(buffer_stream):
    """Build a console text stderr writing UTF-16-LE via the console
    API, exposing *buffer_stream* as its ``.buffer``."""
    text_stream = _NonClosingTextIOWrapper(
        _WindowsConsoleWriter(STDERR_HANDLE),
        'utf-16-le', 'strict', line_buffering=True)
    return ConsoleStream(text_stream, buffer_stream)
if PY2:
    # Python 2 only: helpers for recovering the unicode command line via
    # GetCommandLineW/CommandLineToArgvW (presumably because the
    # ANSI-decoded sys.argv can lose characters -- see the call sites).
    def _hash_py_argv():
        # Fingerprint of the argv Python itself saw, so later callers can
        # detect whether sys.argv was modified after startup.
        return zlib.crc32('\x00'.join(sys.argv[1:]))

    _initial_argv_hash = _hash_py_argv()

    def _get_windows_argv():
        """Return the script's arguments from the real Windows command
        line, skipping the interpreter and its options."""
        argc = c_int(0)
        argv_unicode = CommandLineToArgvW(GetCommandLineW(), byref(argc))
        argv = [argv_unicode[i] for i in range(0, argc.value)]
        if not hasattr(sys, 'frozen'):
            # Drop the interpreter executable, then any interpreter
            # options up to the first non-option (the script name).
            argv = argv[1:]
            while len(argv) > 0:
                arg = argv[0]
                if not arg.startswith('-') or arg == '-':
                    break
                argv = argv[1:]
                if arg.startswith(('-c', '-m')):
                    break
        return argv[1:]
# fileno() -> factory for the matching console text stream.
_stream_factories = {
    0: _get_text_stdin,
    1: _get_text_stdout,
    2: _get_text_stderr,
}


def _get_windows_console_stream(f, encoding, errors):
    """Return a console-API-backed replacement for stream *f*, or None.

    Only substitutes when buffer access is available, the requested
    encoding/errors are compatible with the console (utf-16-le/strict
    or unspecified), and *f* is an interactive standard stream.
    """
    if get_buffer is not None and \
       encoding in ('utf-16-le', None) \
       and errors in ('strict', None) and \
       hasattr(f, 'isatty') and f.isatty():
        func = _stream_factories.get(f.fileno())
        if func is not None:
            if not PY2:
                # Work on the underlying binary buffer on Python 3.
                f = getattr(f, 'buffer')
                if f is None:
                    return None
            else:
                # If we are on Python 2 we need to set the stream that we
                # deal with to binary mode as otherwise the exercise is a
                # bit moot. The same problems apply as for
                # get_binary_stdin and friends from _compat.
                msvcrt.setmode(f.fileno(), os.O_BINARY)
            return func(f)
import errno | |
import os | |
import sys | |
from contextlib import contextmanager | |
from itertools import repeat | |
from functools import update_wrapper | |
from .types import convert_type, IntRange, BOOL | |
from .utils import make_str, make_default_short_help, echo, get_os_args | |
from .exceptions import ClickException, UsageError, BadParameter, Abort, \ | |
MissingParameter | |
from .termui import prompt, confirm | |
from .formatting import HelpFormatter, join_options | |
from .parser import OptionParser, split_opt | |
from .globals import push_context, pop_context | |
from ._compat import PY2, isidentifier, iteritems | |
from ._unicodefun import _check_for_unicode_literals, _verify_python3_env | |
# Sentinel distinct from None for "no value was provided".
_missing = object()

# Metavars rendered in usage strings for subcommand arguments
# (non-chained and chained variants).
SUBCOMMAND_METAVAR = 'COMMAND [ARGS]...'
SUBCOMMANDS_METAVAR = 'COMMAND1 [ARGS]... [COMMAND2 [ARGS]...]...'
def _bashcomplete(cmd, prog_name, complete_var=None): | |
"""Internal handler for the bash completion support.""" | |
if complete_var is None: | |
complete_var = '_%s_COMPLETE' % (prog_name.replace('-', '_')).upper() | |
complete_instr = os.environ.get(complete_var) | |
if not complete_instr: | |
return | |
from ._bashcomplete import bashcomplete | |
if bashcomplete(cmd, prog_name, complete_var, complete_instr): | |
sys.exit(1) | |
def _check_multicommand(base_command, cmd_name, cmd, register=False):
    """Reject attaching a MultiCommand beneath a chain-mode MultiCommand.

    :param register: True when called at registration time; only changes
        the wording of the error.
    :raises RuntimeError: when *base_command* chains and *cmd* is itself
        a multi command.
    """
    if not base_command.chain or not isinstance(cmd, MultiCommand):
        return
    if register:
        hint = 'It is not possible to add multi commands as children to ' \
               'another multi command that is in chain mode'
    else:
        hint = 'Found a multi command as subcommand to a multi command ' \
               'that is in chain mode. This is not supported'
    raise RuntimeError('%s. Command "%s" is set to chain and "%s" was '
                       'added as subcommand but it in itself is a '
                       'multi command. ("%s" is a %s within a chained '
                       '%s named "%s"). This restriction was supposed to '
                       'be lifted in 6.0 but the fix was flawed. This '
                       'will be fixed in Click 7.0' % (
                           hint, base_command.name, cmd_name,
                           cmd_name, cmd.__class__.__name__,
                           base_command.__class__.__name__,
                           base_command.name))
def batch(iterable, batch_size):
    """Slice *iterable* into tuples of ``batch_size`` items each,
    discarding any trailing remainder."""
    # Zipping the same iterator with itself pulls batch_size items per
    # output tuple; zip stops at the first exhausted position, which
    # drops an incomplete final batch.
    it = iter(iterable)
    return list(zip(*((it,) * batch_size)))
def invoke_param_callback(callback, ctx, param, value):
    """Invoke a parameter callback, supporting the legacy two-argument
    ``(ctx, value)`` signature with a deprecation warning."""
    code = getattr(callback, '__code__', None)
    arg_count = getattr(code, 'co_argcount', 3)
    if arg_count >= 3:
        return callback(ctx, param, value)
    # This will become a warning in Click 3.0:
    from warnings import warn
    warn(Warning('Invoked legacy parameter callback "%s". The new '
                 'signature for such callbacks starting with '
                 'click 2.0 is (ctx, param, value).'
                 % callback), stacklevel=3)
    return callback(ctx, value)
@contextmanager
def augment_usage_errors(ctx, param=None):
    """Context manager that attaches extra information to exceptions that
    fly.
    """
    try:
        yield
    except BadParameter as e:
        # BadParameter must be handled before its base class UsageError;
        # it additionally records the offending parameter.
        if e.ctx is None:
            e.ctx = ctx
        if param is not None and e.param is None:
            e.param = param
        raise
    except UsageError as e:
        if e.ctx is None:
            e.ctx = ctx
        raise
def iter_params_for_processing(invocation_order, declaration_order):
    """Given a sequence of parameters in the order as should be considered
    for processing and an iterable of parameters that exist, this returns
    a list in the correct order as they should be processed.
    """
    def sort_key(item):
        # Eager parameters sort first; within each group, parameters
        # keep their invocation order, with uninvoked ones last.
        if item in invocation_order:
            idx = invocation_order.index(item)
        else:
            idx = float('inf')
        return (not item.is_eager, idx)
    return sorted(declaration_order, key=sort_key)
class Context(object): | |
"""The context is a special internal object that holds state relevant | |
for the script execution at every single level. It's normally invisible | |
to commands unless they opt-in to getting access to it. | |
The context is useful as it can pass internal objects around and can | |
control special execution features such as reading data from | |
environment variables. | |
A context can be used as context manager in which case it will call | |
:meth:`close` on teardown. | |
.. versionadded:: 2.0 | |
Added the `resilient_parsing`, `help_option_names`, | |
`token_normalize_func` parameters. | |
.. versionadded:: 3.0 | |
Added the `allow_extra_args` and `allow_interspersed_args` | |
parameters. | |
.. versionadded:: 4.0 | |
Added the `color`, `ignore_unknown_options`, and | |
`max_content_width` parameters. | |
:param command: the command class for this context. | |
:param parent: the parent context. | |
:param info_name: the info name for this invocation. Generally this | |
is the most descriptive name for the script or | |
command. For the toplevel script it is usually | |
the name of the script, for commands below it it's | |
the name of the script. | |
:param obj: an arbitrary object of user data. | |
:param auto_envvar_prefix: the prefix to use for automatic environment | |
variables. If this is `None` then reading | |
from environment variables is disabled. This | |
does not affect manually set environment | |
variables which are always read. | |
:param default_map: a dictionary (like object) with default values | |
for parameters. | |
:param terminal_width: the width of the terminal. The default is | |
inherit from parent context. If no context | |
defines the terminal width then auto | |
detection will be applied. | |
:param max_content_width: the maximum width for content rendered by | |
Click (this currently only affects help | |
pages). This defaults to 80 characters if | |
not overridden. In other words: even if the | |
terminal is larger than that, Click will not | |
format things wider than 80 characters by | |
default. In addition to that, formatters might | |
add some safety mapping on the right. | |
:param resilient_parsing: if this flag is enabled then Click will | |
parse without any interactivity or callback | |
invocation. This is useful for implementing | |
things such as completion support. | |
:param allow_extra_args: if this is set to `True` then extra arguments | |
at the end will not raise an error and will be | |
kept on the context. The default is to inherit | |
from the command. | |
:param allow_interspersed_args: if this is set to `False` then options | |
and arguments cannot be mixed. The | |
default is to inherit from the command. | |
:param ignore_unknown_options: instructs click to ignore options it does | |
not know and keeps them for later | |
processing. | |
:param help_option_names: optionally a list of strings that define how | |
the default help parameter is named. The | |
default is ``['--help']``. | |
:param token_normalize_func: an optional function that is used to | |
normalize tokens (options, choices, | |
etc.). This for instance can be used to | |
implement case insensitive behavior. | |
:param color: controls if the terminal supports ANSI colors or not. The | |
default is autodetection. This is only needed if ANSI | |
codes are used in texts that Click prints which is by | |
default not the case. This for instance would affect | |
help output. | |
""" | |
def __init__(self, command, parent=None, info_name=None, obj=None, | |
auto_envvar_prefix=None, default_map=None, | |
terminal_width=None, max_content_width=None, | |
resilient_parsing=False, allow_extra_args=None, | |
allow_interspersed_args=None, | |
ignore_unknown_options=None, help_option_names=None, | |
token_normalize_func=None, color=None): | |
#: the parent context or `None` if none exists. | |
self.parent = parent | |
#: the :class:`Command` for this context. | |
self.command = command | |
#: the descriptive information name | |
self.info_name = info_name | |
#: the parsed parameters except if the value is hidden in which | |
#: case it's not remembered. | |
self.params = {} | |
#: the leftover arguments. | |
self.args = [] | |
#: protected arguments. These are arguments that are prepended | |
#: to `args` when certain parsing scenarios are encountered but | |
#: must be never propagated to another arguments. This is used | |
#: to implement nested parsing. | |
self.protected_args = [] | |
if obj is None and parent is not None: | |
obj = parent.obj | |
#: the user object stored. | |
self.obj = obj | |
self._meta = getattr(parent, 'meta', {}) | |
#: A dictionary (-like object) with defaults for parameters. | |
if default_map is None \ | |
and parent is not None \ | |
and parent.default_map is not None: | |
default_map = parent.default_map.get(info_name) | |
self.default_map = default_map | |
#: This flag indicates if a subcommand is going to be executed. A | |
#: group callback can use this information to figure out if it's | |
#: being executed directly or because the execution flow passes | |
#: onwards to a subcommand. By default it's None, but it can be | |
#: the name of the subcommand to execute. | |
#: | |
#: If chaining is enabled this will be set to ``'*'`` in case | |
#: any commands are executed. It is however not possible to | |
#: figure out which ones. If you require this knowledge you | |
#: should use a :func:`resultcallback`. | |
self.invoked_subcommand = None | |
if terminal_width is None and parent is not None: | |
terminal_width = parent.terminal_width | |
#: The width of the terminal (None is autodetection). | |
self.terminal_width = terminal_width | |
if max_content_width is None and parent is not None: | |
max_content_width = parent.max_content_width | |
#: The maximum width of formatted content (None implies a sensible | |
#: default which is 80 for most things). | |
self.max_content_width = max_content_width | |
if allow_extra_args is None: | |
allow_extra_args = command.allow_extra_args | |
#: Indicates if the context allows extra args or if it should | |
#: fail on parsing. | |
#: | |
#: .. versionadded:: 3.0 | |
self.allow_extra_args = allow_extra_args | |
if allow_interspersed_args is None: | |
allow_interspersed_args = command.allow_interspersed_args | |
#: Indicates if the context allows mixing of arguments and | |
#: options or not. | |
#: | |
#: .. versionadded:: 3.0 | |
self.allow_interspersed_args = allow_interspersed_args | |
if ignore_unknown_options is None: | |
ignore_unknown_options = command.ignore_unknown_options | |
#: Instructs click to ignore options that a command does not | |
#: understand and will store it on the context for later | |
#: processing. This is primarily useful for situations where you | |
#: want to call into external programs. Generally this pattern is | |
#: strongly discouraged because it's not possibly to losslessly | |
#: forward all arguments. | |
#: | |
#: .. versionadded:: 4.0 | |
self.ignore_unknown_options = ignore_unknown_options | |
if help_option_names is None: | |
if parent is not None: | |
help_option_names = parent.help_option_names | |
else: | |
help_option_names = ['--help'] | |
#: The names for the help options. | |
self.help_option_names = help_option_names | |
if token_normalize_func is None and parent is not None: | |
token_normalize_func = parent.token_normalize_func | |
#: An optional normalization function for tokens. This is | |
#: options, choices, commands etc. | |
self.token_normalize_func = token_normalize_func | |
#: Indicates if resilient parsing is enabled. In that case Click | |
#: will do its best to not cause any failures. | |
self.resilient_parsing = resilient_parsing | |
# If there is no envvar prefix yet, but the parent has one and | |
# the command on this level has a name, we can expand the envvar | |
# prefix automatically. | |
if auto_envvar_prefix is None: | |
if parent is not None \ | |
and parent.auto_envvar_prefix is not None and \ | |
self.info_name is not None: | |
auto_envvar_prefix = '%s_%s' % (parent.auto_envvar_prefix, | |
self.info_name.upper()) | |
else: | |
self.auto_envvar_prefix = auto_envvar_prefix.upper() | |
self.auto_envvar_prefix = auto_envvar_prefix | |
if color is None and parent is not None: | |
color = parent.color | |
#: Controls if styling output is wanted or not. | |
self.color = color | |
self._close_callbacks = [] | |
self._depth = 0 | |
    def __enter__(self):
        # Track nesting depth so close() only runs when the outermost
        # ``with`` block exits (see __exit__).
        self._depth += 1
        # Make this context the current thread-local context
        # (see :func:`get_current_context`).
        push_context(self)
        return self
    def __exit__(self, exc_type, exc_value, tb):
        self._depth -= 1
        # Only run the close callbacks once the outermost level exits.
        if self._depth == 0:
            self.close()
        # Always pop, even on error, to restore the previous current context.
        pop_context()
    @contextmanager
    def scope(self, cleanup=True):
        """This helper method can be used with the context object to promote
        it to the current thread local (see :func:`get_current_context`).
        The default behavior of this is to invoke the cleanup functions which
        can be disabled by setting `cleanup` to `False`.  The cleanup
        functions are typically used for things such as closing file handles.
        If the cleanup is intended the context object can also be directly
        used as a context manager.
        Example usage::
            with ctx.scope():
                assert get_current_context() is ctx
        This is equivalent::
            with ctx:
                assert get_current_context() is ctx
        .. versionadded:: 5.0
        :param cleanup: controls if the cleanup functions should be run or
                        not.  The default is to run these functions.  In
                        some situations the context only wants to be
                        temporarily pushed in which case this can be disabled.
                        Nested pushes automatically defer the cleanup.
        """
        if not cleanup:
            # Artificially raise the depth so that __exit__ never reaches
            # zero inside this scope and therefore does not invoke close().
            self._depth += 1
        try:
            with self as rv:
                yield rv
        finally:
            # Undo the artificial bump regardless of how the body exited.
            if not cleanup:
                self._depth -= 1
    @property
    def meta(self):
        """This is a dictionary which is shared with all the contexts
        that are nested.  It exists so that click utiltiies can store some
        state here if they need to.  It is however the responsibility of
        that code to manage this dictionary well.
        The keys are supposed to be unique dotted strings.  For instance
        module paths are a good choice for it.  What is stored in there is
        irrelevant for the operation of click.  However what is important is
        that code that places data here adheres to the general semantics of
        the system.
        Example usage::
            LANG_KEY = __name__ + '.lang'
            def set_language(value):
                ctx = get_current_context()
                ctx.meta[LANG_KEY] = value
            def get_language():
                return get_current_context().meta.get(LANG_KEY, 'en_US')
        .. versionadded:: 5.0
        """
        # The same dict object is inherited from the parent context at
        # construction time, which is what makes it shared.
        return self._meta
    def make_formatter(self):
        """Creates the formatter for the help and usage output."""
        # Width settings come from this context (possibly inherited from
        # the parent context at construction time).
        return HelpFormatter(width=self.terminal_width,
                             max_width=self.max_content_width)
    def call_on_close(self, f):
        """This decorator remembers a function as callback that should be
        executed when the context tears down.  This is most useful to bind
        resource handling to the script execution.  For instance, file objects
        opened by the :class:`File` type will register their close callbacks
        here.
        :param f: the function to execute on teardown.
        """
        self._close_callbacks.append(f)
        # Returning ``f`` unchanged allows use as a decorator.
        return f
def close(self): | |
"""Invokes all close callbacks.""" | |
for cb in self._close_callbacks: | |
cb() | |
self._close_callbacks = [] | |
@property | |
def command_path(self): | |
"""The computed command path. This is used for the ``usage`` | |
information on the help page. It's automatically created by | |
combining the info names of the chain of contexts to the root. | |
""" | |
rv = '' | |
if self.info_name is not None: | |
rv = self.info_name | |
if self.parent is not None: | |
rv = self.parent.command_path + ' ' + rv | |
return rv.lstrip() | |
def find_root(self): | |
"""Finds the outermost context.""" | |
node = self | |
while node.parent is not None: | |
node = node.parent | |
return node | |
def find_object(self, object_type): | |
"""Finds the closest object of a given type.""" | |
node = self | |
while node is not None: | |
if isinstance(node.obj, object_type): | |
return node.obj | |
node = node.parent | |
def ensure_object(self, object_type): | |
"""Like :meth:`find_object` but sets the innermost object to a | |
new instance of `object_type` if it does not exist. | |
""" | |
rv = self.find_object(object_type) | |
if rv is None: | |
self.obj = rv = object_type() | |
return rv | |
def lookup_default(self, name): | |
"""Looks up the default for a parameter name. This by default | |
looks into the :attr:`default_map` if available. | |
""" | |
if self.default_map is not None: | |
rv = self.default_map.get(name) | |
if callable(rv): | |
rv = rv() | |
return rv | |
    def fail(self, message):
        """Aborts the execution of the program with a specific error
        message.
        :param message: the error message to fail with.
        """
        # The context is passed along so the error renderer can show the
        # usage line of the failing command.
        raise UsageError(message, self)
    def abort(self):
        """Aborts the script."""
        # Abort is caught by BaseCommand.main in standalone mode, which
        # prints 'Aborted!' and exits with status 1.
        raise Abort()
    def exit(self, code=0):
        """Exits the application with a given exit code."""
        # Raises SystemExit; callers that need to continue must catch it.
        sys.exit(code)
    def get_usage(self):
        """Helper method to get formatted usage string for the current
        context and command.
        """
        # Delegates to the command so subclasses can customize rendering.
        return self.command.get_usage(self)
    def get_help(self):
        """Helper method to get formatted help page for the current
        context and command.
        """
        # Delegates to the command so subclasses can customize rendering.
        return self.command.get_help(self)
    def invoke(*args, **kwargs):
        """Invokes a command callback in exactly the way it expects.  There
        are two ways to invoke this method:
        1.  the first argument can be a callback and all other arguments and
            keyword arguments are forwarded directly to the function.
        2.  the first argument is a click command object.  In that case all
            arguments are forwarded as well but proper click parameters
            (options and click arguments) must be keyword arguments and Click
            will fill in defaults.
        Note that before Click 3.2 keyword arguments were not properly filled
        in against the intention of this code and no context was created.  For
        more information about this change and why it was done in a bugfix
        release see :ref:`upgrade-to-3.2`.
        """
        # ``self`` is pulled out of *args deliberately so that a keyword
        # argument literally named "self" can still be forwarded to the
        # callback through **kwargs.
        self, callback = args[:2]
        ctx = self
        # It's also possible to invoke another command which might or
        # might not have a callback.  In that case we also fill
        # in defaults and make a new context for this command.
        if isinstance(callback, Command):
            other_cmd = callback
            callback = other_cmd.callback
            ctx = Context(other_cmd, info_name=other_cmd.name, parent=self)
            if callback is None:
                raise TypeError('The given command does not have a '
                                'callback that can be invoked.')
            # Fill in defaults for every exposed parameter the caller did
            # not supply explicitly.
            for param in other_cmd.params:
                if param.name not in kwargs and param.expose_value:
                    kwargs[param.name] = param.get_default(ctx)
        args = args[2:]
        with augment_usage_errors(self):
            with ctx:
                return callback(*args, **kwargs)
    def forward(*args, **kwargs):
        """Similar to :meth:`invoke` but fills in default keyword
        arguments from the current context if the other command expects
        it.  This cannot invoke callbacks directly, only other commands.
        """
        # Same positional trick as :meth:`invoke` so that a keyword named
        # "self" can be forwarded.
        self, cmd = args[:2]
        # It's also possible to invoke another command which might or
        # might not have a callback.
        if not isinstance(cmd, Command):
            raise TypeError('Callback is not a command.')
        # Copy this context's parsed parameters into kwargs wherever the
        # caller did not explicitly override them.
        for param in self.params:
            if param not in kwargs:
                kwargs[param] = self.params[param]
        return self.invoke(cmd, **kwargs)
class BaseCommand(object):
    """The base command implements the minimal API contract of commands.
    Most code will never use this as it does not implement a lot of useful
    functionality but it can act as the direct subclass of alternative
    parsing methods that do not depend on the Click parser.
    For instance, this can be used to bridge Click and other systems like
    argparse or docopt.
    Because base commands do not implement a lot of the API that other
    parts of Click take for granted, they are not supported for all
    operations.  For instance, they cannot be used with the decorators
    usually and they have no built-in callback system.
    .. versionchanged:: 2.0
       Added the `context_settings` parameter.
    :param name: the name of the command to use unless a group overrides it.
    :param context_settings: an optional dictionary with defaults that are
                             passed to the context object.
    """
    #: the default for the :attr:`Context.allow_extra_args` flag.
    allow_extra_args = False
    #: the default for the :attr:`Context.allow_interspersed_args` flag.
    allow_interspersed_args = True
    #: the default for the :attr:`Context.ignore_unknown_options` flag.
    ignore_unknown_options = False
    def __init__(self, name, context_settings=None):
        #: the name the command thinks it has.  Upon registering a command
        #: on a :class:`Group` the group will default the command name
        #: with this information.  You should instead use the
        #: :class:`Context`\'s :attr:`~Context.info_name` attribute.
        self.name = name
        if context_settings is None:
            context_settings = {}
        #: an optional dictionary with defaults passed to the context.
        self.context_settings = context_settings
    def get_usage(self, ctx):
        # Subclasses (e.g. Command) render actual usage text.
        raise NotImplementedError('Base commands cannot get usage')
    def get_help(self, ctx):
        # Subclasses (e.g. Command) render the actual help page.
        raise NotImplementedError('Base commands cannot get help')
    def make_context(self, info_name, args, parent=None, **extra):
        """This function when given an info name and arguments will kick
        off the parsing and create a new :class:`Context`.  It does not
        invoke the actual command callback though.
        :param info_name: the info name for this invokation.  Generally this
                          is the most descriptive name for the script or
                          command.  For the toplevel script it's usually
                          the name of the script, for commands below it it's
                          the name of the script.
        :param args: the arguments to parse as list of strings.
        :param parent: the parent context if available.
        :param extra: extra keyword arguments forwarded to the context
                      constructor.
        """
        # Explicit keyword arguments win over the command's stored
        # context_settings defaults.
        for key, value in iteritems(self.context_settings):
            if key not in extra:
                extra[key] = value
        ctx = Context(self, info_name=info_name, parent=parent, **extra)
        # Parse with the context pushed but without running its close
        # callbacks yet; the caller owns the context's lifetime.
        with ctx.scope(cleanup=False):
            self.parse_args(ctx, args)
        return ctx
    def parse_args(self, ctx, args):
        """Given a context and a list of arguments this creates the parser
        and parses the arguments, then modifies the context as necessary.
        This is automatically invoked by :meth:`make_context`.
        """
        raise NotImplementedError('Base commands do not know how to parse '
                                  'arguments.')
    def invoke(self, ctx):
        """Given a context, this invokes the command.  The default
        implementation is raising a not implemented error.
        """
        raise NotImplementedError('Base commands are not invokable by default')
    def main(self, args=None, prog_name=None, complete_var=None,
             standalone_mode=True, **extra):
        """This is the way to invoke a script with all the bells and
        whistles as a command line application.  This will always terminate
        the application after a call.  If this is not wanted, ``SystemExit``
        needs to be caught.
        This method is also available by directly calling the instance of
        a :class:`Command`.
        .. versionadded:: 3.0
           Added the `standalone_mode` flag to control the standalone mode.
        :param args: the arguments that should be used for parsing.  If not
                     provided, ``sys.argv[1:]`` is used.
        :param prog_name: the program name that should be used.  By default
                          the program name is constructed by taking the file
                          name from ``sys.argv[0]``.
        :param complete_var: the environment variable that controls the
                             bash completion support.  The default is
                             ``"_<prog_name>_COMPLETE"`` with prog name in
                             uppercase.
        :param standalone_mode: the default behavior is to invoke the script
                                in standalone mode.  Click will then
                                handle exceptions and convert them into
                                error messages and the function will never
                                return but shut down the interpreter.  If
                                this is set to `False` they will be
                                propagated to the caller and the return
                                value of this function is the return value
                                of :meth:`invoke`.
        :param extra: extra keyword arguments are forwarded to the context
                      constructor.  See :class:`Context` for more information.
        """
        # If we are in Python 3, we will verify that the environment is
        # sane at this point or reject further execution to avoid a
        # broken script.
        if not PY2:
            _verify_python3_env()
        else:
            _check_for_unicode_literals()
        if args is None:
            args = get_os_args()
        else:
            args = list(args)
        if prog_name is None:
            prog_name = make_str(os.path.basename(
                sys.argv and sys.argv[0] or __file__))
        # Hook for the Bash completion.  This only activates if the Bash
        # completion is actually enabled, otherwise this is quite a fast
        # noop.
        _bashcomplete(self, prog_name, complete_var)
        try:
            try:
                with self.make_context(prog_name, args, **extra) as ctx:
                    rv = self.invoke(ctx)
                    if not standalone_mode:
                        return rv
                    ctx.exit()
            except (EOFError, KeyboardInterrupt):
                # Treat Ctrl-C / EOF as a user-initiated abort; the blank
                # echo ensures the prompt ends on a fresh line.
                echo(file=sys.stderr)
                raise Abort()
            except ClickException as e:
                if not standalone_mode:
                    raise
                e.show()
                sys.exit(e.exit_code)
            except IOError as e:
                # A broken pipe (e.g. output piped into `head`) is not an
                # error worth a traceback.
                if e.errno == errno.EPIPE:
                    sys.exit(1)
                else:
                    raise
        except Abort:
            if not standalone_mode:
                raise
            echo('Aborted!', file=sys.stderr)
            sys.exit(1)
    def __call__(self, *args, **kwargs):
        """Alias for :meth:`main`."""
        return self.main(*args, **kwargs)
class Command(BaseCommand):
    """Commands are the basic building block of command line interfaces in
    Click.  A basic command handles command line parsing and might dispatch
    more parsing to commands nested below it.
    .. versionchanged:: 2.0
       Added the `context_settings` parameter.
    :param name: the name of the command to use unless a group overrides it.
    :param context_settings: an optional dictionary with defaults that are
                             passed to the context object.
    :param callback: the callback to invoke.  This is optional.
    :param params: the parameters to register with this command.  This can
                   be either :class:`Option` or :class:`Argument` objects.
    :param help: the help string to use for this command.
    :param epilog: like the help string but it's printed at the end of the
                   help page after everything else.
    :param short_help: the short help to use for this command.  This is
                       shown on the command listing of the parent command.
    :param add_help_option: by default each command registers a ``--help``
                            option.  This can be disabled by this parameter.
    """
    def __init__(self, name, context_settings=None, callback=None,
                 params=None, help=None, epilog=None, short_help=None,
                 options_metavar='[OPTIONS]', add_help_option=True):
        BaseCommand.__init__(self, name, context_settings)
        #: the callback to execute when the command fires.  This might be
        #: `None` in which case nothing happens.
        self.callback = callback
        #: the list of parameters for this command in the order they
        #: should show up in the help page and execute.  Eager parameters
        #: will automatically be handled before non eager ones.
        self.params = params or []
        # The long help text shown on the command's own help page.
        self.help = help
        # Optional text rendered after everything else on the help page.
        self.epilog = epilog
        # Placeholder shown for options in the usage line.
        self.options_metavar = options_metavar
        if short_help is None and help:
            # Derive a one-line summary from the long help text.
            short_help = make_default_short_help(help)
        self.short_help = short_help
        # Whether an implicit --help option is registered (see
        # get_help_option).
        self.add_help_option = add_help_option
    def get_usage(self, ctx):
        # Render the usage line through a fresh formatter.
        formatter = ctx.make_formatter()
        self.format_usage(ctx, formatter)
        return formatter.getvalue().rstrip('\n')
    def get_params(self, ctx):
        # The declared parameters plus the implicit help option (if any).
        rv = self.params
        help_option = self.get_help_option(ctx)
        if help_option is not None:
            rv = rv + [help_option]
        return rv
    def format_usage(self, ctx, formatter):
        """Writes the usage line into the formatter."""
        pieces = self.collect_usage_pieces(ctx)
        formatter.write_usage(ctx.command_path, ' '.join(pieces))
    def collect_usage_pieces(self, ctx):
        """Returns all the pieces that go into the usage line and returns
        it as a list of strings.
        """
        rv = [self.options_metavar]
        for param in self.get_params(ctx):
            rv.extend(param.get_usage_pieces(ctx))
        return rv
    def get_help_option_names(self, ctx):
        """Returns the names for the help option."""
        # Start from the context's configured names and drop any that a
        # declared parameter already claims.
        all_names = set(ctx.help_option_names)
        for param in self.params:
            all_names.difference_update(param.opts)
            all_names.difference_update(param.secondary_opts)
        return all_names
    def get_help_option(self, ctx):
        """Returns the help option object."""
        help_options = self.get_help_option_names(ctx)
        if not help_options or not self.add_help_option:
            return
        def show_help(ctx, param, value):
            # Eager callback: print help and exit unless parsing is in
            # resilient (no side effects) mode.
            if value and not ctx.resilient_parsing:
                echo(ctx.get_help(), color=ctx.color)
                ctx.exit()
        return Option(help_options, is_flag=True,
                      is_eager=True, expose_value=False,
                      callback=show_help,
                      help='Show this message and exit.')
    def make_parser(self, ctx):
        """Creates the underlying option parser for this command."""
        parser = OptionParser(ctx)
        parser.allow_interspersed_args = ctx.allow_interspersed_args
        parser.ignore_unknown_options = ctx.ignore_unknown_options
        for param in self.get_params(ctx):
            param.add_to_parser(parser, ctx)
        return parser
    def get_help(self, ctx):
        """Formats the help into a string and returns it.  This creates a
        formatter and will call into the following formatting methods:
        """
        formatter = ctx.make_formatter()
        self.format_help(ctx, formatter)
        return formatter.getvalue().rstrip('\n')
    def format_help(self, ctx, formatter):
        """Writes the help into the formatter if it exists.
        This calls into the following methods:
        -   :meth:`format_usage`
        -   :meth:`format_help_text`
        -   :meth:`format_options`
        -   :meth:`format_epilog`
        """
        self.format_usage(ctx, formatter)
        self.format_help_text(ctx, formatter)
        self.format_options(ctx, formatter)
        self.format_epilog(ctx, formatter)
    def format_help_text(self, ctx, formatter):
        """Writes the help text to the formatter if it exists."""
        if self.help:
            formatter.write_paragraph()
            with formatter.indentation():
                formatter.write_text(self.help)
    def format_options(self, ctx, formatter):
        """Writes all the options into the formatter if they exist."""
        opts = []
        for param in self.get_params(ctx):
            rv = param.get_help_record(ctx)
            if rv is not None:
                opts.append(rv)
        if opts:
            with formatter.section('Options'):
                formatter.write_dl(opts)
    def format_epilog(self, ctx, formatter):
        """Writes the epilog into the formatter if it exists."""
        if self.epilog:
            formatter.write_paragraph()
            with formatter.indentation():
                formatter.write_text(self.epilog)
    def parse_args(self, ctx, args):
        parser = self.make_parser(ctx)
        opts, args, param_order = parser.parse_args(args=args)
        # Process parameters in the order the parser determined (eager
        # parameters first), letting each one consume from opts/args.
        for param in iter_params_for_processing(
                param_order, self.get_params(ctx)):
            value, args = param.handle_parse_result(ctx, opts, args)
        if args and not ctx.allow_extra_args and not ctx.resilient_parsing:
            ctx.fail('Got unexpected extra argument%s (%s)'
                     % (len(args) != 1 and 's' or '',
                        ' '.join(map(make_str, args))))
        ctx.args = args
        return args
    def invoke(self, ctx):
        """Given a context, this invokes the attached callback (if it exists)
        in the right way.
        """
        if self.callback is not None:
            return ctx.invoke(self.callback, **ctx.params)
class MultiCommand(Command): | |
"""A multi command is the basic implementation of a command that | |
dispatches to subcommands. The most common version is the | |
:class:`Group`. | |
:param invoke_without_command: this controls how the multi command itself | |
is invoked. By default it's only invoked | |
if a subcommand is provided. | |
:param no_args_is_help: this controls what happens if no arguments are | |
provided. This option is enabled by default if | |
`invoke_without_command` is disabled or disabled | |
if it's enabled. If enabled this will add | |
``--help`` as argument if no arguments are | |
passed. | |
:param subcommand_metavar: the string that is used in the documentation | |
to indicate the subcommand place. | |
:param chain: if this is set to `True` chaining of multiple subcommands | |
is enabled. This restricts the form of commands in that | |
they cannot have optional arguments but it allows | |
multiple commands to be chained together. | |
:param result_callback: the result callback to attach to this multi | |
command. | |
""" | |
allow_extra_args = True | |
allow_interspersed_args = False | |
def __init__(self, name=None, invoke_without_command=False, | |
no_args_is_help=None, subcommand_metavar=None, | |
chain=False, result_callback=None, **attrs): | |
Command.__init__(self, name, **attrs) | |
if no_args_is_help is None: | |
no_args_is_help = not invoke_without_command | |
self.no_args_is_help = no_args_is_help | |
self.invoke_without_command = invoke_without_command | |
if subcommand_metavar is None: | |
if chain: | |
subcommand_metavar = SUBCOMMANDS_METAVAR | |
else: | |
subcommand_metavar = SUBCOMMAND_METAVAR | |
self.subcommand_metavar = subcommand_metavar | |
self.chain = chain | |
#: The result callback that is stored. This can be set or | |
#: overridden with the :func:`resultcallback` decorator. | |
self.result_callback = result_callback | |
if self.chain: | |
for param in self.params: | |
if isinstance(param, Argument) and not param.required: | |
raise RuntimeError('Multi commands in chain mode cannot ' | |
'have optional arguments.') | |
def collect_usage_pieces(self, ctx): | |
rv = Command.collect_usage_pieces(self, ctx) | |
rv.append(self.subcommand_metavar) | |
return rv | |
def format_options(self, ctx, formatter): | |
Command.format_options(self, ctx, formatter) | |
self.format_commands(ctx, formatter) | |
def resultcallback(self, replace=False): | |
"""Adds a result callback to the chain command. By default if a | |
result callback is already registered this will chain them but | |
this can be disabled with the `replace` parameter. The result | |
callback is invoked with the return value of the subcommand | |
(or the list of return values from all subcommands if chaining | |
is enabled) as well as the parameters as they would be passed | |
to the main callback. | |
Example:: | |
@click.group() | |
@click.option('-i', '--input', default=23) | |
def cli(input): | |
return 42 | |
@cli.resultcallback() | |
def process_result(result, input): | |
return result + input | |
.. versionadded:: 3.0 | |
:param replace: if set to `True` an already existing result | |
callback will be removed. | |
""" | |
def decorator(f): | |
old_callback = self.result_callback | |
if old_callback is None or replace: | |
self.result_callback = f | |
return f | |
def function(__value, *args, **kwargs): | |
return f(old_callback(__value, *args, **kwargs), | |
*args, **kwargs) | |
self.result_callback = rv = update_wrapper(function, f) | |
return rv | |
return decorator | |
def format_commands(self, ctx, formatter): | |
"""Extra format methods for multi methods that adds all the commands | |
after the options. | |
""" | |
rows = [] | |
for subcommand in self.list_commands(ctx): | |
cmd = self.get_command(ctx, subcommand) | |
# What is this, the tool lied about a command. Ignore it | |
if cmd is None: | |
continue | |
help = cmd.short_help or '' | |
rows.append((subcommand, help)) | |
if rows: | |
with formatter.section('Commands'): | |
formatter.write_dl(rows) | |
def parse_args(self, ctx, args): | |
if not args and self.no_args_is_help and not ctx.resilient_parsing: | |
echo(ctx.get_help(), color=ctx.color) | |
ctx.exit() | |
rest = Command.parse_args(self, ctx, args) | |
if self.chain: | |
ctx.protected_args = rest | |
ctx.args = [] | |
elif rest: | |
ctx.protected_args, ctx.args = rest[:1], rest[1:] | |
return ctx.args | |
def invoke(self, ctx): | |
def _process_result(value): | |
if self.result_callback is not None: | |
value = ctx.invoke(self.result_callback, value, | |
**ctx.params) | |
return value | |
if not ctx.protected_args: | |
# If we are invoked without command the chain flag controls | |
# how this happens. If we are not in chain mode, the return | |
# value here is the return value of the command. | |
# If however we are in chain mode, the return value is the | |
# return value of the result processor invoked with an empty | |
# list (which means that no subcommand actually was executed). | |
if self.invoke_without_command: | |
if not self.chain: | |
return Command.invoke(self, ctx) | |
with ctx: | |
Command.invoke(self, ctx) | |
return _process_result([]) | |
ctx.fail('Missing command.') | |
# Fetch args back out | |
args = ctx.protected_args + ctx.args | |
ctx.args = [] | |
ctx.protected_args = [] | |
# If we're not in chain mode, we only allow the invocation of a | |
# single command but we also inform the current context about the | |
# name of the command to invoke. | |
if not self.chain: | |
# Make sure the context is entered so we do not clean up | |
# resources until the result processor has worked. | |
with ctx: | |
cmd_name, cmd, args = self.resolve_command(ctx, args) | |
ctx.invoked_subcommand = cmd_name | |
Command.invoke(self, ctx) | |
sub_ctx = cmd.make_context(cmd_name, args, parent=ctx) | |
with sub_ctx: | |
return _process_result(sub_ctx.command.invoke(sub_ctx)) | |
# In chain mode we create the contexts step by step, but after the | |
# base command has been invoked. Because at that point we do not | |
# know the subcommands yet, the invoked subcommand attribute is | |
# set to ``*`` to inform the command that subcommands are executed | |
# but nothing else. | |
with ctx: | |
ctx.invoked_subcommand = args and '*' or None | |
Command.invoke(self, ctx) | |
# Otherwise we make every single context and invoke them in a | |
# chain. In that case the return value to the result processor | |
# is the list of all invoked subcommand's results. | |
contexts = [] | |
while args: | |
cmd_name, cmd, args = self.resolve_command(ctx, args) | |
sub_ctx = cmd.make_context(cmd_name, args, parent=ctx, | |
allow_extra_args=True, | |
allow_interspersed_args=False) | |
contexts.append(sub_ctx) | |
args, sub_ctx.args = sub_ctx.args, [] | |
rv = [] | |
for sub_ctx in contexts: | |
with sub_ctx: | |
rv.append(sub_ctx.command.invoke(sub_ctx)) | |
return _process_result(rv) | |
def resolve_command(self, ctx, args): | |
cmd_name = make_str(args[0]) | |
original_cmd_name = cmd_name | |
# Get the command | |
cmd = self.get_command(ctx, cmd_name) | |
# If we can't find the command but there is a normalization | |
# function available, we try with that one. | |
if cmd is None and ctx.token_normalize_func is not None: | |
cmd_name = ctx.token_normalize_func(cmd_name) | |
cmd = self.get_command(ctx, cmd_name) | |
# If we don't find the command we want to show an error message | |
# to the user that it was not provided. However, there is | |
# something else we should do: if the first argument looks like | |
# an option we want to kick off parsing again for arguments to | |
# resolve things like --help which now should go to the main | |
# place. | |
if cmd is None: | |
if split_opt(cmd_name)[0]: | |
self.parse_args(ctx, ctx.args) | |
ctx.fail('No such command "%s".' % original_cmd_name) | |
return cmd_name, cmd, args[1:] | |
    def get_command(self, ctx, cmd_name):
        """Given a context and a command name, this returns a
        :class:`Command` object if it exists or returns `None`.

        The base implementation knows no commands and always raises;
        subclasses (e.g. :class:`Group`) must override this.
        """
        raise NotImplementedError()
    def list_commands(self, ctx):
        """Returns a list of subcommand names in the order they should
        appear.

        The base implementation returns an empty list; subclasses
        override this to expose their registered commands.
        """
        return []
class Group(MultiCommand):
    """A group allows a command to have subcommands attached.  This is
    the most common way to implement nesting in Click.

    :param commands: a dictionary of commands.
    """

    def __init__(self, name=None, commands=None, **attrs):
        MultiCommand.__init__(self, name, **attrs)
        #: the registered subcommands by their exported names.
        self.commands = commands or {}

    def add_command(self, cmd, name=None):
        """Registers another :class:`Command` with this group.  If the
        name is not provided, the name of the command is used.
        """
        name = name or cmd.name
        if name is None:
            raise TypeError('Command has no name.')
        # Reject registrations that would break chained multi commands.
        _check_multicommand(self, name, cmd, register=True)
        self.commands[name] = cmd

    def command(self, *args, **kwargs):
        """A shortcut decorator for declaring and attaching a command to
        the group.  This takes the same arguments as :func:`command` but
        immediately registers the created command with this instance by
        calling into :meth:`add_command`.
        """
        def decorator(f):
            new_command = command(*args, **kwargs)(f)
            self.add_command(new_command)
            return new_command
        return decorator

    def group(self, *args, **kwargs):
        """A shortcut decorator for declaring and attaching a group to
        the group.  This takes the same arguments as :func:`group` but
        immediately registers the created group with this instance by
        calling into :meth:`add_command`.
        """
        def decorator(f):
            new_group = group(*args, **kwargs)(f)
            self.add_command(new_group)
            return new_group
        return decorator

    def get_command(self, ctx, cmd_name):
        """Look the name up in the registered command mapping."""
        try:
            return self.commands[cmd_name]
        except KeyError:
            return None

    def list_commands(self, ctx):
        """All registered subcommand names, alphabetically sorted."""
        return sorted(self.commands.keys())
class CommandCollection(MultiCommand):
    """A command collection is a multi command that merges multiple multi
    commands together into one.  This is a straightforward implementation
    that accepts a list of different multi commands as sources and
    provides all the commands for each of them.
    """

    def __init__(self, name=None, sources=None, **attrs):
        MultiCommand.__init__(self, name, **attrs)
        #: The list of registered multi commands.
        self.sources = sources or []

    def add_source(self, multi_cmd):
        """Adds a new multi command to the chain dispatcher."""
        self.sources.append(multi_cmd)

    def get_command(self, ctx, cmd_name):
        """Return the first source's match for ``cmd_name``, or None."""
        # Sources are consulted in registration order; first hit wins.
        for source in self.sources:
            found = source.get_command(ctx, cmd_name)
            if found is None:
                continue
            if self.chain:
                _check_multicommand(self, cmd_name, found)
            return found

    def list_commands(self, ctx):
        """The union of all sources' command names, sorted."""
        names = set()
        for source in self.sources:
            names.update(source.list_commands(ctx))
        return sorted(names)
class Parameter(object):
    """A parameter to a command comes in two versions: they are either
    :class:`Option`\\s or :class:`Argument`\\s.  Other subclasses are currently
    not supported by design as some of the internals for parsing are
    intentionally not finalized.

    Some settings are supported by both options and arguments.

    .. versionchanged:: 2.0
       Changed signature for parameter callback to also be passed the
       parameter.  In Click 2.0, the old callback format will still work,
       but it will raise a warning to give you change to migrate the
       code easier.

    :param param_decls: the parameter declarations for this option or
                        argument.  This is a list of flags or argument
                        names.
    :param type: the type that should be used.  Either a :class:`ParamType`
                 or a Python type.  The later is converted into the former
                 automatically if supported.
    :param required: controls if this is optional or not.
    :param default: the default value if omitted.  This can also be a
                    callable, in which case it's invoked when the default
                    is needed without any arguments.
    :param callback: a callback that should be executed after the parameter
                     was matched.  This is called as ``fn(ctx, param,
                     value)`` and needs to return the value.  Before Click
                     2.0, the signature was ``(ctx, value)``.
    :param nargs: the number of arguments to match.  If not ``1`` the return
                  value is a tuple instead of single value.  The default for
                  nargs is ``1`` (except if the type is a tuple, then it's
                  the arity of the tuple).
    :param metavar: how the value is represented in the help page.
    :param expose_value: if this is `True` then the value is passed onwards
                         to the command callback and stored on the context,
                         otherwise it's skipped.
    :param is_eager: eager values are processed before non eager ones.  This
                     should not be set for arguments or it will inverse the
                     order of processing.
    :param envvar: a string or list of strings that are environment variables
                   that should be checked.
    """
    # Used in error messages to describe what kind of parameter failed.
    param_type_name = 'parameter'

    def __init__(self, param_decls=None, type=None, required=False,
                 default=None, callback=None, nargs=None, metavar=None,
                 expose_value=True, is_eager=False, envvar=None):
        self.name, self.opts, self.secondary_opts = \
            self._parse_decls(param_decls or (), expose_value)

        self.type = convert_type(type, default)

        # Default nargs to what the type tells us if we have that
        # information available.
        if nargs is None:
            if self.type.is_composite:
                nargs = self.type.arity
            else:
                nargs = 1

        self.required = required
        self.callback = callback
        self.nargs = nargs
        # Always False here; the Option subclass overwrites this
        # attribute from its own constructor argument.
        self.multiple = False
        self.expose_value = expose_value
        self.default = default
        self.is_eager = is_eager
        self.metavar = metavar
        self.envvar = envvar

    @property
    def human_readable_name(self):
        """Returns the human readable name of this parameter.  This is the
        same as the name for options, but the metavar for arguments.
        """
        return self.name

    def make_metavar(self):
        """Build the metavar for usage/help output: an explicit ``metavar``
        wins, then the type's suggestion, then the upper-cased type name;
        ``...`` marks multi-value parameters.
        """
        if self.metavar is not None:
            return self.metavar
        metavar = self.type.get_metavar(self)
        if metavar is None:
            metavar = self.type.name.upper()
        if self.nargs != 1:
            metavar += '...'
        return metavar

    def get_default(self, ctx):
        """Given a context variable this calculates the default value."""
        # Otherwise go with the regular default.
        if callable(self.default):
            rv = self.default()
        else:
            rv = self.default
        # Defaults are run through the type system like user input.
        return self.type_cast_value(ctx, rv)

    def add_to_parser(self, parser, ctx):
        # The base parameter registers nothing with the low-level parser;
        # Option and Argument override this.
        pass

    def consume_value(self, ctx, opts):
        """Pick this parameter's raw value: the parsed ``opts`` mapping
        first, then the context's default map, then the environment.
        """
        value = opts.get(self.name)
        if value is None:
            value = ctx.lookup_default(self.name)
        if value is None:
            value = self.value_from_envvar(ctx)
        return value

    def type_cast_value(self, ctx, value):
        """Given a value this runs it properly through the type system.
        This automatically handles things like `nargs` and `multiple` as
        well as composite types.
        """
        if self.type.is_composite:
            # Composite types consume a fixed tuple of tokens, so nargs
            # must be a fixed arity greater than one.
            if self.nargs <= 1:
                raise TypeError('Attempted to invoke composite type '
                                'but nargs has been set to %s. This is '
                                'not supported; nargs needs to be set to '
                                'a fixed value > 1.' % self.nargs)
            if self.multiple:
                return tuple(self.type(x or (), self, ctx) for x in value or ())
            return self.type(value or (), self, ctx)

        def _convert(value, level):
            # ``level`` counts the remaining tuple nesting: nargs != 1
            # contributes one level, multiple contributes another.
            if level == 0:
                return self.type(value, self, ctx)
            return tuple(_convert(x, level - 1) for x in value or ())
        return _convert(value, (self.nargs != 1) + bool(self.multiple))

    def process_value(self, ctx, value):
        """Given a value and context this runs the logic to convert the
        value as necessary.
        """
        # If the value we were given is None we do nothing.  This way
        # code that calls this can easily figure out if something was
        # not provided.  Otherwise it would be converted into an empty
        # tuple for multiple invocations which is inconvenient.
        if value is not None:
            return self.type_cast_value(ctx, value)

    def value_is_missing(self, value):
        # Missing means None, or the empty tuple for multi-value
        # parameters (nargs != 1 or multiple).
        if value is None:
            return True
        if (self.nargs != 1 or self.multiple) and value == ():
            return True
        return False

    def full_process_value(self, ctx, value):
        """Convert ``value``, fall back to the default when absent, and
        enforce ``required`` (raising :exc:`MissingParameter`).
        """
        value = self.process_value(ctx, value)
        if value is None:
            value = self.get_default(ctx)
        if self.required and self.value_is_missing(value):
            raise MissingParameter(ctx=ctx, param=self)
        return value

    def resolve_envvar_value(self, ctx):
        """Look up the raw environment string for this parameter.  With a
        list of variable names, the first one that is set wins.  Returns
        None when no envvar is configured or none is set.
        """
        if self.envvar is None:
            return
        if isinstance(self.envvar, (tuple, list)):
            for envvar in self.envvar:
                rv = os.environ.get(envvar)
                if rv is not None:
                    return rv
        else:
            return os.environ.get(self.envvar)

    def value_from_envvar(self, ctx):
        """Resolve the envvar string, splitting it into tokens for
        multi-value parameters.
        """
        rv = self.resolve_envvar_value(ctx)
        if rv is not None and self.nargs != 1:
            rv = self.type.split_envvar_value(rv)
        return rv

    def handle_parse_result(self, ctx, opts, args):
        """Consume, convert and expose this parameter's value after the
        low-level parse.  Returns ``(value, args)``.

        Under ``ctx.resilient_parsing`` conversion and callback errors
        are swallowed and the value falls back to None.
        """
        with augment_usage_errors(ctx, param=self):
            value = self.consume_value(ctx, opts)
            try:
                value = self.full_process_value(ctx, value)
            except Exception:
                if not ctx.resilient_parsing:
                    raise
                value = None
            if self.callback is not None:
                try:
                    value = invoke_param_callback(
                        self.callback, ctx, self, value)
                except Exception:
                    if not ctx.resilient_parsing:
                        raise

        if self.expose_value:
            ctx.params[self.name] = value
        return value, args

    def get_help_record(self, ctx):
        # Base parameters contribute nothing to the options help listing;
        # Option overrides this.
        pass

    def get_usage_pieces(self, ctx):
        # No usage-line fragment by default; Argument overrides this.
        return []
class Option(Parameter):
    """Options are usually optional values on the command line and
    have some extra features that arguments don't have.

    All other parameters are passed onwards to the parameter constructor.

    :param show_default: controls if the default value should be shown on the
                         help page.  Normally, defaults are not shown.
    :param prompt: if set to `True` or a non empty string then the user will
                   be prompted for input if not set.  If set to `True` the
                   prompt will be the option name capitalized.
    :param confirmation_prompt: if set then the value will need to be confirmed
                                if it was prompted for.
    :param hide_input: if this is `True` then the input on the prompt will be
                       hidden from the user.  This is useful for password
                       input.
    :param is_flag: forces this option to act as a flag.  The default is
                    auto detection.
    :param flag_value: which value should be used for this flag if it's
                       enabled.  This is set to a boolean automatically if
                       the option string contains a slash to mark two options.
    :param multiple: if this is set to `True` then the argument is accepted
                     multiple times and recorded.  This is similar to ``nargs``
                     in how it works but supports arbitrary number of
                     arguments.
    :param count: this flag makes an option increment an integer.
    :param allow_from_autoenv: if this is enabled then the value of this
                               parameter will be pulled from an environment
                               variable in case a prefix is defined on the
                               context.
    :param help: the help string.
    """
    param_type_name = 'option'

    def __init__(self, param_decls=None, show_default=False,
                 prompt=False, confirmation_prompt=False,
                 hide_input=False, is_flag=None, flag_value=None,
                 multiple=False, count=False, allow_from_autoenv=True,
                 type=None, help=None, **attrs):
        # Remember whether the caller supplied an explicit default before
        # the base constructor normalizes it; flags and counters install
        # their own defaults only when none was given.
        default_is_missing = attrs.get('default', _missing) is _missing
        Parameter.__init__(self, param_decls, type=type, **attrs)

        if prompt is True:
            # prompt=True derives the prompt text from the option name.
            prompt_text = self.name.replace('_', ' ').capitalize()
        elif prompt is False:
            prompt_text = None
        else:
            prompt_text = prompt
        self.prompt = prompt_text
        self.confirmation_prompt = confirmation_prompt
        self.hide_input = hide_input

        # Flags
        if is_flag is None:
            if flag_value is not None:
                # Implicit: an explicit flag_value implies a flag.
                is_flag = True
            else:
                # Secondary (off) opts such as --no-x also imply a flag.
                is_flag = bool(self.secondary_opts)
        if is_flag and default_is_missing:
            self.default = False
        if flag_value is None:
            flag_value = not self.default
        self.is_flag = is_flag
        self.flag_value = flag_value
        if self.is_flag and isinstance(self.flag_value, bool) \
           and type is None:
            self.type = BOOL
            self.is_bool_flag = True
        else:
            self.is_bool_flag = False

        # Counting
        self.count = count
        if count:
            if type is None:
                self.type = IntRange(min=0)
            if default_is_missing:
                self.default = 0

        self.multiple = multiple
        self.allow_from_autoenv = allow_from_autoenv
        self.help = help
        self.show_default = show_default

        # Sanity check for stuff we don't support
        if __debug__:
            if self.nargs < 0:
                raise TypeError('Options cannot have nargs < 0')
            if self.prompt and self.is_flag and not self.is_bool_flag:
                raise TypeError('Cannot prompt for flags that are not bools.')
            if not self.is_bool_flag and self.secondary_opts:
                raise TypeError('Got secondary option for non boolean flag.')
            if self.is_bool_flag and self.hide_input \
               and self.prompt is not None:
                raise TypeError('Hidden input does not work with boolean '
                                'flag prompts.')
            if self.count:
                if self.multiple:
                    raise TypeError('Options cannot be multiple and count '
                                    'at the same time.')
                elif self.is_flag:
                    raise TypeError('Options cannot be count and flags at '
                                    'the same time.')

    def _parse_decls(self, decls, expose_value):
        """Split declarations into ``(name, opts, secondary_opts)``.

        A plain identifier fixes the name; flag strings may carry an
        on/off pair separated by ``/`` (or ``;`` when the declaration
        itself starts with ``/``, i.e. Windows-style options).
        """
        opts = []
        secondary_opts = []
        name = None
        possible_names = []

        for decl in decls:
            if isidentifier(decl):
                if name is not None:
                    raise TypeError('Name defined twice')
                name = decl
            else:
                split_char = decl[:1] == '/' and ';' or '/'
                if split_char in decl:
                    first, second = decl.split(split_char, 1)
                    first = first.rstrip()
                    if first:
                        possible_names.append(split_opt(first))
                        opts.append(first)
                    second = second.lstrip()
                    if second:
                        # NOTE(review): ``second`` was already lstripped
                        # above; the second lstrip() is redundant but
                        # harmless.
                        secondary_opts.append(second.lstrip())
                else:
                    possible_names.append(split_opt(decl))
                    opts.append(decl)

        if name is None and possible_names:
            # Derive the name from the declaration with the longest
            # prefix, i.e. prefer '--verbose' over '-v'.
            possible_names.sort(key=lambda x: len(x[0]))
            name = possible_names[-1][1].replace('-', '_').lower()
            if not isidentifier(name):
                name = None

        if name is None:
            if not expose_value:
                return None, opts, secondary_opts
            raise TypeError('Could not determine name for option')

        if not opts and not secondary_opts:
            raise TypeError('No options defined but a name was passed (%s). '
                            'Did you mean to declare an argument instead '
                            'of an option?' % name)

        return name, opts, secondary_opts

    def add_to_parser(self, parser, ctx):
        """Register this option with the low-level option parser."""
        kwargs = {
            'dest': self.name,
            'nargs': self.nargs,
            'obj': self,
        }

        if self.multiple:
            action = 'append'
        elif self.count:
            action = 'count'
        else:
            action = 'store'

        if self.is_flag:
            # Flags consume no value on the command line; a constant is
            # stored instead, so nargs does not apply.
            kwargs.pop('nargs', None)
            if self.is_bool_flag and self.secondary_opts:
                # Paired on/off flags: primary stores True, secondary
                # stores False.
                parser.add_option(self.opts, action=action + '_const',
                                  const=True, **kwargs)
                parser.add_option(self.secondary_opts, action=action +
                                  '_const', const=False, **kwargs)
            else:
                parser.add_option(self.opts, action=action + '_const',
                                  const=self.flag_value,
                                  **kwargs)
        else:
            kwargs['action'] = action
            parser.add_option(self.opts, **kwargs)

    def get_help_record(self, ctx):
        """Return the ``(option names, help text)`` pair for help pages."""
        any_prefix_is_slash = []

        def _write_opts(opts):
            # Mutates ``any_prefix_is_slash`` (one-element list used as a
            # closure cell) so the joiner below can pick '; ' over ' / '.
            rv, any_slashes = join_options(opts)
            if any_slashes:
                any_prefix_is_slash[:] = [True]
            if not self.is_flag and not self.count:
                rv += ' ' + self.make_metavar()
            return rv

        rv = [_write_opts(self.opts)]
        if self.secondary_opts:
            rv.append(_write_opts(self.secondary_opts))

        help = self.help or ''
        extra = []
        if self.default is not None and self.show_default:
            extra.append('default: %s' % (
                ', '.join('%s' % d for d in self.default)
                if isinstance(self.default, (list, tuple))
                else self.default, ))
        if self.required:
            extra.append('required')
        if extra:
            # Append bracketed annotations like "[default: x; required]".
            help = '%s[%s]' % (help and help + ' ' or '', '; '.join(extra))

        return ((any_prefix_is_slash and '; ' or ' / ').join(rv), help)

    def get_default(self, ctx):
        # If we're a non boolean flag our default is more complex because
        # we need to look at all flags in the same group to figure out
        # if we're the default one in which case we return the flag
        # value as default.
        if self.is_flag and not self.is_bool_flag:
            for param in ctx.command.params:
                if param.name == self.name and param.default:
                    return param.flag_value
            return None
        return Parameter.get_default(self, ctx)

    def prompt_for_value(self, ctx):
        """This is an alternative flow that can be activated in the full
        value processing if a value does not exist.  It will prompt the
        user until a valid value exists and then returns the processed
        value as result.
        """
        # Calculate the default before prompting anything to be stable.
        default = self.get_default(ctx)

        # If this is a prompt for a flag we need to handle this
        # differently.
        if self.is_bool_flag:
            return confirm(self.prompt, default)

        return prompt(self.prompt, default=default,
                      hide_input=self.hide_input,
                      confirmation_prompt=self.confirmation_prompt,
                      value_proc=lambda x: self.process_value(ctx, x))

    def resolve_envvar_value(self, ctx):
        """Like the base implementation, but additionally falls back to
        ``<AUTO_ENVVAR_PREFIX>_<NAME>`` when the context defines an auto
        envvar prefix.
        """
        rv = Parameter.resolve_envvar_value(self, ctx)
        if rv is not None:
            return rv
        if self.allow_from_autoenv and \
           ctx.auto_envvar_prefix is not None:
            envvar = '%s_%s' % (ctx.auto_envvar_prefix, self.name.upper())
            return os.environ.get(envvar)

    def value_from_envvar(self, ctx):
        """Resolve and, for multi-value options, split/group the raw
        environment string.
        """
        rv = self.resolve_envvar_value(ctx)
        if rv is None:
            return None
        value_depth = (self.nargs != 1) + bool(self.multiple)
        if value_depth > 0 and rv is not None:
            rv = self.type.split_envvar_value(rv)
            if self.multiple and self.nargs != 1:
                # Group the flat token list into nargs-sized chunks, one
                # chunk per occurrence.
                rv = batch(rv, self.nargs)
        return rv

    def full_process_value(self, ctx, value):
        # Prompt only when no value was supplied anywhere else and we are
        # not resilient-parsing (e.g. during completion).
        if value is None and self.prompt is not None \
           and not ctx.resilient_parsing:
            return self.prompt_for_value(ctx)
        return Parameter.full_process_value(self, ctx, value)
class Argument(Parameter):
    """Arguments are positional parameters to a command.  They generally
    provide fewer features than options but can have infinite ``nargs``
    and are required by default.

    All parameters are passed onwards to the parameter constructor.
    """
    param_type_name = 'argument'

    def __init__(self, param_decls, required=None, **attrs):
        if required is None:
            # Required by default, unless a default value was supplied or
            # the argument accepts a variable/zero number of values.
            required = attrs.get('default') is None \
                and attrs.get('nargs', 1) > 0
        Parameter.__init__(self, param_decls, required=required, **attrs)
        if self.default is not None and self.nargs < 0:
            raise TypeError('nargs=-1 in combination with a default value '
                            'is not supported.')

    @property
    def human_readable_name(self):
        """The metavar when one is set, otherwise the upper-cased name."""
        if self.metavar is None:
            return self.name.upper()
        return self.metavar

    def make_metavar(self):
        """Build the usage metavar: ``NAME``, wrapped in brackets when
        optional, with ``...`` appended for multi-value arguments.
        """
        if self.metavar is not None:
            return self.metavar
        metavar = self.name.upper()
        if not self.required:
            metavar = '[%s]' % metavar
        if self.nargs != 1:
            metavar += '...'
        return metavar

    def _parse_decls(self, decls, expose_value):
        """Derive ``(name, opts, secondary_opts)`` from the declarations.

        Arguments accept exactly one declaration (used as both name and
        opt) or two (explicit name plus opt).
        """
        if not decls:
            if expose_value:
                raise TypeError('Could not determine name for argument')
            return None, [], []
        if len(decls) == 2:
            name, arg = decls
        elif len(decls) == 1:
            arg = decls[0]
            name = arg.replace('-', '_').lower()
        else:
            raise TypeError('Arguments take exactly one or two '
                            'parameter declarations, got %d' % len(decls))
        return name, [arg], []

    def get_usage_pieces(self, ctx):
        """Arguments contribute their metavar to the usage line."""
        return [self.make_metavar()]

    def add_to_parser(self, parser, ctx):
        """Register this argument with the low-level parser."""
        parser.add_argument(dest=self.name, nargs=self.nargs,
                            obj=self)
# Circular dependency between decorators and core | |
from .decorators import command, group |
import sys | |
import inspect | |
from functools import update_wrapper | |
from ._compat import iteritems | |
from ._unicodefun import _check_for_unicode_literals | |
from .utils import echo | |
from .globals import get_current_context | |
def pass_context(f):
    """Marks a callback as wanting to receive the current context
    object as first argument.
    """
    def new_func(*args, **kwargs):
        # The context is resolved lazily, at call time.
        ctx = get_current_context()
        return f(ctx, *args, **kwargs)
    return update_wrapper(new_func, f)
def pass_obj(f):
    """Similar to :func:`pass_context`, but only pass the object on the
    context onwards (:attr:`Context.obj`).  This is useful if that object
    represents the state of a nested system.
    """
    def new_func(*args, **kwargs):
        # Resolve the context lazily and forward only its obj.
        obj = get_current_context().obj
        return f(obj, *args, **kwargs)
    return update_wrapper(new_func, f)
def make_pass_decorator(object_type, ensure=False):
    """Given an object type this creates a decorator that will work
    similar to :func:`pass_obj` but instead of passing the object of the
    current context, it will find the innermost context of type
    :func:`object_type`.

    This generates a decorator that works roughly like this::

        from functools import update_wrapper

        def decorator(f):
            @pass_context
            def new_func(ctx, *args, **kwargs):
                obj = ctx.find_object(object_type)
                return ctx.invoke(f, obj, *args, **kwargs)
            return update_wrapper(new_func, f)
        return decorator

    :param object_type: the type of the object to pass.
    :param ensure: if set to `True`, a new object will be created and
                   remembered on the context if it's not there yet.
    """
    def decorator(f):
        def new_func(*args, **kwargs):
            ctx = get_current_context()
            if ensure:
                obj = ctx.ensure_object(object_type)
            else:
                obj = ctx.find_object(object_type)
            if obj is None:
                raise RuntimeError('Managed to invoke callback without a '
                                   'context object of type %r existing'
                                   % object_type.__name__)
            # The first positional argument is replaced by the found
            # object; remaining args are forwarded unchanged.
            return ctx.invoke(f, obj, *args[1:], **kwargs)
        return update_wrapper(new_func, f)
    return decorator
def _make_command(f, name, attrs, cls):
    """Create a ``cls`` command instance from the decorated callback ``f``,
    collecting any parameters previously memoized by @option/@argument.
    """
    if isinstance(f, Command):
        raise TypeError('Attempted to convert a callback into a '
                        'command twice.')
    try:
        params = f.__click_params__
    except AttributeError:
        params = []
    else:
        # Decorators apply bottom-up, so the memoized list is reversed
        # to restore declaration order; the memo is then removed.
        params.reverse()
        del f.__click_params__
    help_text = attrs.get('help')
    if help_text is None:
        # Fall back to the callback's docstring as help text.
        help_text = inspect.getdoc(f)
        if isinstance(help_text, bytes):
            help_text = help_text.decode('utf-8')
    else:
        help_text = inspect.cleandoc(help_text)
    attrs['help'] = help_text
    _check_for_unicode_literals()
    return cls(name=name or f.__name__.lower(),
               callback=f, params=params, **attrs)
def command(name=None, cls=None, **attrs):
    r"""Creates a new :class:`Command` and uses the decorated function as
    callback.  This will also automatically attach all decorated
    :func:`option`\s and :func:`argument`\s as parameters to the command.

    The name of the command defaults to the name of the function.  If you
    want to change that, you can pass the intended name as the first
    argument.

    All keyword arguments are forwarded to the underlying command class.

    Once decorated the function turns into a :class:`Command` instance
    that can be invoked as a command line utility or be attached to a
    command :class:`Group`.

    :param name: the name of the command.  This defaults to the function
                 name.
    :param cls: the command class to instantiate.  This defaults to
                :class:`Command`.
    """
    command_cls = Command if cls is None else cls

    def decorator(f):
        new_command = _make_command(f, name, attrs, command_cls)
        # Keep the callback's docstring visible on the command object.
        new_command.__doc__ = f.__doc__
        return new_command
    return decorator
def group(name=None, **attrs):
    """Creates a new :class:`Group` with a function as callback.  This
    works otherwise the same as :func:`command` just that the `cls`
    parameter is set to :class:`Group`.
    """
    if 'cls' not in attrs:
        attrs['cls'] = Group
    return command(name, **attrs)
def _param_memo(f, param):
    """Record ``param`` on ``f``: append directly for Command objects,
    otherwise stash it on the callback's ``__click_params__`` list for
    later collection by :func:`_make_command`.
    """
    if isinstance(f, Command):
        f.params.append(param)
        return
    if not hasattr(f, '__click_params__'):
        f.__click_params__ = []
    f.__click_params__.append(param)
def argument(*param_decls, **attrs):
    """Attaches an argument to the command.  All positional arguments are
    passed as parameter declarations to :class:`Argument`; all keyword
    arguments are forwarded unchanged (except ``cls``).

    This is equivalent to creating an :class:`Argument` instance manually
    and attaching it to the :attr:`Command.params` list.

    :param cls: the argument class to instantiate.  This defaults to
                :class:`Argument`.
    """
    def decorator(f):
        argument_cls = attrs.pop('cls', Argument)
        _param_memo(f, argument_cls(param_decls, **attrs))
        return f
    return decorator
def option(*param_decls, **attrs):
    """Attaches an option to the command.  All positional arguments are
    passed as parameter declarations to :class:`Option`; all keyword
    arguments are forwarded unchanged (except ``cls``).

    This is equivalent to creating an :class:`Option` instance manually
    and attaching it to the :attr:`Command.params` list.

    :param cls: the option class to instantiate.  This defaults to
                :class:`Option`.
    """
    def decorator(f):
        # Dedent indented help strings so triple-quoted help reads well.
        if 'help' in attrs:
            attrs['help'] = inspect.cleandoc(attrs['help'])
        option_cls = attrs.pop('cls', Option)
        _param_memo(f, option_cls(param_decls, **attrs))
        return f
    return decorator
def confirmation_option(*param_decls, **attrs):
    """Shortcut for confirmation prompts that can be ignored by passing
    ``--yes`` as parameter.

    This is equivalent to decorating a function with :func:`option` with
    the following parameters::

        def callback(ctx, param, value):
            if not value:
                ctx.abort()

        @click.command()
        @click.option('--yes', is_flag=True, callback=callback,
                      expose_value=False, prompt='Do you want to continue?')
        def dropdb():
            pass
    """
    def decorator(f):
        def callback(ctx, param, value):
            # A declined prompt (or absent flag) aborts the program.
            if not value:
                ctx.abort()
        defaults = {
            'is_flag': True,
            'callback': callback,
            'expose_value': False,
            'prompt': 'Do you want to continue?',
            'help': 'Confirm the action without prompting.',
        }
        for key, default in defaults.items():
            attrs.setdefault(key, default)
        return option(*(param_decls or ('--yes',)), **attrs)(f)
    return decorator
def password_option(*param_decls, **attrs):
    """Shortcut for password prompts.

    This is equivalent to decorating a function with :func:`option` with
    the following parameters::

        @click.command()
        @click.option('--password', prompt=True, confirmation_prompt=True,
                      hide_input=True)
        def changeadmin(password):
            pass
    """
    def decorator(f):
        for key, default in (('prompt', True),
                             ('confirmation_prompt', True),
                             ('hide_input', True)):
            attrs.setdefault(key, default)
        return option(*(param_decls or ('--password',)), **attrs)(f)
    return decorator
def version_option(version=None, *param_decls, **attrs):
    """Adds a ``--version`` option which immediately ends the program
    printing out the version number.  This is implemented as an eager
    option that prints the version and exits the program in the callback.

    :param version: the version number to show.  If not provided Click
                    attempts an auto discovery via setuptools.
    :param prog_name: the name of the program (defaults to autodetection)
    :param message: custom message to show instead of the default
                    (``'%(prog)s, version %(version)s'``)
    :param others: everything else is forwarded to :func:`option`.
    """
    if version is None:
        # Capture the caller's module name now; it is used later to find
        # the setuptools distribution providing the console script.
        module = sys._getframe(1).f_globals.get('__name__')

    def decorator(f):
        prog_name = attrs.pop('prog_name', None)
        message = attrs.pop('message', '%(prog)s, version %(version)s')

        def callback(ctx, param, value):
            # Do nothing when the flag is absent or during resilient
            # parsing (e.g. shell completion).
            if not value or ctx.resilient_parsing:
                return
            prog = prog_name
            if prog is None:
                prog = ctx.find_root().info_name
            ver = version
            if ver is None:
                # Auto discovery: scan installed distributions for a
                # console script defined in the caller's module.
                try:
                    import pkg_resources
                except ImportError:
                    pass
                else:
                    for dist in pkg_resources.working_set:
                        scripts = dist.get_entry_map().get('console_scripts') or {}
                        for script_name, entry_point in iteritems(scripts):
                            if entry_point.module_name == module:
                                ver = dist.version
                                break
                        # NOTE(review): this ``break`` only exits the
                        # inner loop; the scan over working_set continues
                        # and a later distribution could overwrite
                        # ``ver`` — confirm whether that is intended.
                if ver is None:
                    raise RuntimeError('Could not determine version')
            echo(message % {
                'prog': prog,
                'version': ver,
            }, color=ctx.color)
            ctx.exit()

        attrs.setdefault('is_flag', True)
        attrs.setdefault('expose_value', False)
        attrs.setdefault('is_eager', True)
        attrs.setdefault('help', 'Show the version and exit.')
        attrs['callback'] = callback
        return option(*(param_decls or ('--version',)), **attrs)(f)
    return decorator
def help_option(*param_decls, **attrs):
    """Adds a ``--help`` option which immediately ends the program
    printing out the help page.  This is usually unnecessary to add as
    this is added by default to all commands unless suppressed.

    Like :func:`version_option`, this is implemented as eager option that
    prints in the callback and exits.

    All arguments are forwarded to :func:`option`.
    """
    def decorator(f):
        def callback(ctx, param, value):
            # Do nothing when the flag is absent or during resilient
            # parsing (e.g. shell completion).
            if not value or ctx.resilient_parsing:
                return
            echo(ctx.get_help(), color=ctx.color)
            ctx.exit()
        for key, default in (('is_flag', True),
                             ('expose_value', False),
                             ('help', 'Show this message and exit.'),
                             ('is_eager', True)):
            attrs.setdefault(key, default)
        attrs['callback'] = callback
        return option(*(param_decls or ('--help',)), **attrs)(f)
    return decorator
# Circular dependencies between core and decorators | |
from .core import Command, Group, Argument, Option |
from ._compat import PY2, filename_to_ui, get_text_stderr | |
from .utils import echo | |
class ClickException(Exception):
    """An exception that Click can handle and show to the user."""

    #: The exit code for this exception
    exit_code = 1

    def __init__(self, message):
        # Under Python 2 the message is stored UTF-8 encoded so it can
        # be written to byte streams without coercion errors.
        if PY2 and message is not None:
            message = message.encode('utf-8')
        Exception.__init__(self, message)
        self.message = message

    def format_message(self):
        """The text shown to the user; subclasses may extend this."""
        return self.message

    def show(self, file=None):
        """Print the formatted error message, defaulting to stderr."""
        if file is None:
            file = get_text_stderr()
        echo('Error: %s' % self.format_message(), file=file)
class UsageError(ClickException):
    """An internal exception that signals a usage error.  This typically
    aborts any further handling.

    :param message: the error message to display.
    :param ctx: optionally the context that caused this error.  Click will
                fill in the context automatically in some situations.
    """

    exit_code = 2

    def __init__(self, message, ctx=None):
        ClickException.__init__(self, message)
        self.ctx = ctx

    def show(self, file=None):
        """Print the usage line (when a context is attached) followed by
        the error message, defaulting to stderr.
        """
        if file is None:
            file = get_text_stderr()
        if self.ctx is None:
            color = None
        else:
            color = self.ctx.color
            echo(self.ctx.get_usage() + '\n', file=file, color=color)
        echo('Error: %s' % self.format_message(), file=file, color=color)
class BadParameter(UsageError):
    """An exception that formats out a standardized error message for a
    bad parameter.  This is useful when thrown from a callback or type as
    Click will attach contextual information to it (for instance, which
    parameter it is).

    .. versionadded:: 2.0

    :param param: the parameter object that caused this error.  This can
                  be left out, and Click will attach this info itself
                  if possible.
    :param param_hint: a string that shows up as parameter name.  This
                       can be used as alternative to `param` in cases
                       where custom validation should happen.  If it is
                       a string it's used as such, if it's a list then
                       each item is quoted and separated.
    """

    def __init__(self, message, ctx=None, param=None,
                 param_hint=None):
        UsageError.__init__(self, message, ctx)
        self.param = param
        self.param_hint = param_hint

    def format_message(self):
        """Compose the error text, naming the offending parameter when a
        hint or parameter object is available.
        """
        param_hint = self.param_hint
        if param_hint is None and self.param is not None:
            param_hint = self.param.opts or [self.param.human_readable_name]
        if param_hint is None:
            # No parameter information at all: fall back to a generic
            # message.
            return 'Invalid value: %s' % self.message
        if isinstance(param_hint, (tuple, list)):
            param_hint = ' / '.join('"%s"' % x for x in param_hint)
        return 'Invalid value for %s: %s' % (param_hint, self.message)
class MissingParameter(BadParameter):
    """Raised if click required an option or argument but it was not
    provided when invoking the script.

    .. versionadded:: 4.0

    :param param_type: a string that indicates the type of the parameter.
                       The default is to inherit the parameter type from
                       the given `param`.  Valid values are ``'parameter'``,
                       ``'option'`` or ``'argument'``.
    """

    def __init__(self, message=None, ctx=None, param=None,
                 param_hint=None, param_type=None):
        BadParameter.__init__(self, message, ctx, param, param_hint)
        self.param_type = param_type

    def format_message(self):
        # Pick the best name for the parameter: an explicit hint wins,
        # otherwise fall back to the parameter's option strings.
        if self.param_hint is not None:
            param_hint = self.param_hint
        elif self.param is not None:
            param_hint = self.param.opts or [self.param.human_readable_name]
        else:
            param_hint = None
        if isinstance(param_hint, (tuple, list)):
            param_hint = ' / '.join('"%s"' % x for x in param_hint)
        # The displayed kind ('option', 'argument', ...) defaults to the
        # parameter's own type name when not given explicitly.
        param_type = self.param_type
        if param_type is None and self.param is not None:
            param_type = self.param.param_type_name
        msg = self.message
        if self.param is not None:
            # Let the parameter's type contribute extra detail.
            msg_extra = self.param.type.get_missing_message(self.param)
            if msg_extra:
                if msg:
                    msg += '. ' + msg_extra
                else:
                    msg = msg_extra
        # The and/or chains keep spacing and the trailing period correct
        # when the hint or the message is absent.
        return 'Missing %s%s%s%s' % (
            param_type,
            param_hint and ' %s' % param_hint or '',
            msg and '. ' or '.',
            msg or '',
        )
class NoSuchOption(UsageError):
    """Raised if click attempted to handle an option that does not
    exist.

    .. versionadded:: 4.0
    """

    def __init__(self, option_name, message=None, possibilities=None,
                 ctx=None):
        if message is None:
            message = 'no such option: %s' % option_name
        UsageError.__init__(self, message, ctx)
        self.option_name = option_name
        self.possibilities = possibilities

    def format_message(self):
        # Start from the base message and append a suggestion when
        # similarly-named options are known.
        parts = [self.message]
        if self.possibilities:
            if len(self.possibilities) == 1:
                parts.append('Did you mean %s?' % self.possibilities[0])
            else:
                names = ', '.join(sorted(self.possibilities))
                parts.append('(Possible options: %s)' % names)
        return ' '.join(parts)
class BadOptionUsage(UsageError):
    """Raised if an option is generally supplied but the use of the option
    was incorrect.  This is for instance raised if the number of arguments
    for an option is not correct.

    .. versionadded:: 4.0
    """

    def __init__(self, message, ctx=None):
        super(BadOptionUsage, self).__init__(message, ctx)
class BadArgumentUsage(UsageError):
    """Raised if an argument is generally supplied but the use of the argument
    was incorrect.  This is for instance raised if the number of values
    for an argument is not correct.

    .. versionadded:: 6.0
    """

    def __init__(self, message, ctx=None):
        super(BadArgumentUsage, self).__init__(message, ctx)
class FileError(ClickException):
    """Raised if a file cannot be opened."""

    def __init__(self, filename, hint=None):
        if hint is None:
            hint = 'unknown error'
        ClickException.__init__(self, hint)
        # Keep both the raw filename and its display form around.
        self.ui_filename = filename_to_ui(filename)
        self.filename = filename

    def format_message(self):
        """Combine the display filename with the failure hint."""
        return 'Could not open file %s: %s' % (self.ui_filename, self.message)
class Abort(RuntimeError):
    """An internal signalling exception that signals Click to abort.

    Raised for example when an interactive prompt is interrupted
    (see :func:`prompt` / :func:`confirm`).
    """
from contextlib import contextmanager | |
from .termui import get_terminal_size | |
from .parser import split_opt | |
from ._compat import term_len | |
# Can force a width.  This is used by the test system; when set it
# overrides the detected terminal width in HelpFormatter.
FORCED_WIDTH = None
def measure_table(rows):
    """Return the maximum display width of each column as a tuple."""
    widths = {}
    for row in rows:
        for col_idx, cell in enumerate(row):
            cell_width = term_len(cell)
            if cell_width > widths.get(col_idx, 0):
                widths[col_idx] = cell_width
    return tuple(width for _, width in sorted(widths.items()))
def iter_rows(rows, col_count):
    """Yield each row as a tuple padded with empty strings to *col_count*."""
    for row in rows:
        padded = tuple(row)
        missing = col_count - len(padded)
        yield padded + ('',) * missing
def wrap_text(text, width=78, initial_indent='', subsequent_indent='',
              preserve_paragraphs=False):
    """A helper function that intelligently wraps text.  By default, it
    assumes that it operates on a single paragraph of text but if the
    `preserve_paragraphs` parameter is provided it will intelligently
    handle paragraphs (separated by blank lines).

    If paragraphs are handled, a paragraph can be prefixed with an empty
    line containing the ``\\b`` character (``\\x08``) to indicate that
    no rewrapping should happen in that block.

    :param text: the text that should be rewrapped.
    :param width: the maximum width for the text.
    :param initial_indent: the initial indent that should be placed on the
                           first line as a string.
    :param subsequent_indent: the indent string that should be placed on
                              each consecutive line.
    :param preserve_paragraphs: if this flag is set then the wrapping will
                                intelligently handle paragraphs.
    """
    from ._textwrap import TextWrapper
    text = text.expandtabs()
    wrapper = TextWrapper(width, initial_indent=initial_indent,
                          subsequent_indent=subsequent_indent,
                          replace_whitespace=False)
    if not preserve_paragraphs:
        return wrapper.fill(text)

    # Paragraph mode: collect (indent, is_raw, text) triples in `p`.
    p = []
    buf = []
    indent = None

    def _flush_par():
        # Emit the buffered lines as one paragraph.  A first line that is
        # just '\b' marks the paragraph as raw (no rewrapping).
        if not buf:
            return
        if buf[0].strip() == '\b':
            p.append((indent or 0, True, '\n'.join(buf[1:])))
        else:
            p.append((indent or 0, False, ' '.join(buf)))
        del buf[:]

    for line in text.splitlines():
        if not line:
            # A blank line terminates the current paragraph.
            _flush_par()
            indent = None
        else:
            if indent is None:
                # The first line of a paragraph determines its indent.
                orig_len = term_len(line)
                line = line.lstrip()
                indent = orig_len - term_len(line)
            buf.append(line)
    _flush_par()

    rv = []
    for indent, raw, text in p:
        with wrapper.extra_indent(' ' * indent):
            if raw:
                # Raw paragraphs are only re-indented, never rewrapped.
                rv.append(wrapper.indent_only(text))
            else:
                rv.append(wrapper.fill(text))

    return '\n\n'.join(rv)
class HelpFormatter(object):
    """This class helps with formatting text-based help pages.  It's
    usually just needed for very special internal cases, but it's also
    exposed so that developers can write their own fancy outputs.

    At present, it always writes into memory.

    :param indent_increment: the additional increment for each level.
    :param width: the width for the text.  This defaults to the terminal
                  width clamped to a maximum of 78.
    """

    def __init__(self, indent_increment=2, width=None, max_width=None):
        self.indent_increment = indent_increment
        if max_width is None:
            max_width = 80
        if width is None:
            width = FORCED_WIDTH
        if width is None:
            # Leave two columns of slack but never go below 50 columns.
            width = max(min(get_terminal_size()[0], max_width) - 2, 50)
        self.width = width
        self.current_indent = 0
        self.buffer = []

    def write(self, string):
        """Writes a unicode string into the internal buffer."""
        self.buffer.append(string)

    def indent(self):
        """Increases the indentation."""
        self.current_indent += self.indent_increment

    def dedent(self):
        """Decreases the indentation."""
        self.current_indent -= self.indent_increment

    def write_usage(self, prog, args='', prefix='Usage: '):
        """Writes a usage line into the buffer.

        :param prog: the program name.
        :param args: whitespace separated list of arguments.
        :param prefix: the prefix for the first line.
        """
        usage_prefix = '%*s%s ' % (self.current_indent, prefix, prog)
        text_width = self.width - self.current_indent

        if text_width >= (term_len(usage_prefix) + 20):
            # The arguments will fit to the right of the prefix.
            indent = ' ' * term_len(usage_prefix)
            self.write(wrap_text(args, text_width,
                                 initial_indent=usage_prefix,
                                 subsequent_indent=indent))
        else:
            # The prefix is too long, put the arguments on the next line.
            self.write(usage_prefix)
            self.write('\n')
            indent = ' ' * (max(self.current_indent, term_len(prefix)) + 4)
            self.write(wrap_text(args, text_width,
                                 initial_indent=indent,
                                 subsequent_indent=indent))

        self.write('\n')

    def write_heading(self, heading):
        """Writes a heading into the buffer."""
        self.write('%*s%s:\n' % (self.current_indent, '', heading))

    def write_paragraph(self):
        """Writes a paragraph separator into the buffer."""
        if self.buffer:
            self.write('\n')

    def write_text(self, text):
        """Writes re-indented text into the buffer.  This rewraps and
        preserves paragraphs.
        """
        text_width = max(self.width - self.current_indent, 11)
        indent = ' ' * self.current_indent
        self.write(wrap_text(text, text_width,
                             initial_indent=indent,
                             subsequent_indent=indent,
                             preserve_paragraphs=True))
        self.write('\n')

    def write_dl(self, rows, col_max=30, col_spacing=2):
        """Writes a definition list into the buffer.  This is how options
        and commands are usually formatted.

        :param rows: a list of two item tuples for the terms and values.
        :param col_max: the maximum width of the first column.
        :param col_spacing: the number of spaces between the first and
                            second column.
        """
        rows = list(rows)
        widths = measure_table(rows)
        if len(widths) != 2:
            raise TypeError('Expected two columns for definition list')

        first_col = min(widths[0], col_max) + col_spacing

        for first, second in iter_rows(rows, len(widths)):
            self.write('%*s%s' % (self.current_indent, '', first))
            if not second:
                self.write('\n')
                continue
            if term_len(first) <= first_col - col_spacing:
                # The second column starts on the same line.
                self.write(' ' * (first_col - term_len(first)))
            else:
                # The first column is too wide; the second column starts
                # on its own line.
                self.write('\n')
                self.write(' ' * (first_col + self.current_indent))

            text_width = max(self.width - first_col - 2, 10)
            # Bug fix: the previous `lines = iter(...)` was always truthy,
            # which made the empty `else` branch unreachable and crashed
            # with StopIteration on `next(lines)` whenever wrapping
            # produced no lines.  Materializing the list restores the
            # intended fallback of writing a bare newline.
            lines = wrap_text(second, text_width).splitlines()
            if lines:
                self.write(lines[0] + '\n')
                for line in lines[1:]:
                    self.write('%*s%s\n' % (
                        first_col + self.current_indent, '', line))
            else:
                self.write('\n')

    @contextmanager
    def section(self, name):
        """Helpful context manager that writes a paragraph, a heading,
        and the indents.

        :param name: the section name that is written as heading.
        """
        self.write_paragraph()
        self.write_heading(name)
        self.indent()
        try:
            yield
        finally:
            self.dedent()

    @contextmanager
    def indentation(self):
        """A context manager that increases the indentation."""
        self.indent()
        try:
            yield
        finally:
            self.dedent()

    def getvalue(self):
        """Returns the buffer contents."""
        return ''.join(self.buffer)
def join_options(options):
    """Given a list of option strings this joins them in the most appropriate
    way and returns them in the form ``(formatted_string,
    any_prefix_is_slash)`` where the second item in the tuple is a flag that
    indicates if any of the option prefixes was a slash.
    """
    any_prefix_is_slash = False
    decorated = []
    for opt in options:
        prefix = split_opt(opt)[0]
        if prefix == '/':
            any_prefix_is_slash = True
        # Sort key: prefix length, so short options come before long ones.
        decorated.append((len(prefix), opt))
    decorated.sort(key=lambda pair: pair[0])
    joined = ', '.join(opt for _, opt in decorated)
    return joined, any_prefix_is_slash
from threading import local

# Thread-local storage holding the stack of active click contexts
# (maintained by push_context/pop_context below).
_local = local()
def get_current_context(silent=False):
    """Returns the current click context.  This can be used as a way to
    access the current context object from anywhere.  This is a more implicit
    alternative to the :func:`pass_context` decorator.  This function is
    primarily useful for helpers such as :func:`echo` which might be
    interested in changing its behavior based on the current context.

    To push the current context, :meth:`Context.scope` can be used.

    .. versionadded:: 5.0

    :param silent: if set to `True` the return value is `None` if no context
                   is available.  The default behavior is to raise a
                   :exc:`RuntimeError`.
    """
    stack = getattr(_local, 'stack', None)
    if stack:
        return stack[-1]
    if not silent:
        raise RuntimeError('There is no active click context.')
def push_context(ctx):
    """Pushes a new context to the current stack."""
    stack = _local.__dict__.setdefault('stack', [])
    stack.append(ctx)
def pop_context():
    """Removes the top level from the stack."""
    del _local.stack[-1]
def resolve_color_default(color=None):
    """Internal helper to get the default value of the color flag.  If a
    value is passed it's returned unchanged, otherwise it's looked up from
    the current context.
    """
    if color is not None:
        return color
    ctx = get_current_context(silent=True)
    # Returns None implicitly when no context is active.
    if ctx is not None:
        return ctx.color
# -*- coding: utf-8 -*- | |
""" | |
click.parser | |
~~~~~~~~~~~~ | |
This module started out as largely a copy paste from the stdlib's | |
optparse module with the features removed that we do not need from | |
optparse because we implement them in Click on a higher level (for | |
instance type handling, help formatting and a lot more). | |
The plan is to remove more and more from here over time. | |
The reason this is a different module and not optparse from the stdlib | |
is that there are differences in 2.x and 3.x about the error messages | |
generated and optparse in the stdlib uses gettext for no good reason | |
and might cause us issues. | |
""" | |
import re | |
from collections import deque | |
from .exceptions import UsageError, NoSuchOption, BadOptionUsage, \ | |
BadArgumentUsage | |
def _unpack_args(args, nargs_spec): | |
"""Given an iterable of arguments and an iterable of nargs specifications, | |
it returns a tuple with all the unpacked arguments at the first index | |
and all remaining arguments as the second. | |
The nargs specification is the number of arguments that should be consumed | |
or `-1` to indicate that this position should eat up all the remainders. | |
Missing items are filled with `None`. | |
""" | |
args = deque(args) | |
nargs_spec = deque(nargs_spec) | |
rv = [] | |
spos = None | |
def _fetch(c): | |
try: | |
if spos is None: | |
return c.popleft() | |
else: | |
return c.pop() | |
except IndexError: | |
return None | |
while nargs_spec: | |
nargs = _fetch(nargs_spec) | |
if nargs == 1: | |
rv.append(_fetch(args)) | |
elif nargs > 1: | |
x = [_fetch(args) for _ in range(nargs)] | |
# If we're reversed, we're pulling in the arguments in reverse, | |
# so we need to turn them around. | |
if spos is not None: | |
x.reverse() | |
rv.append(tuple(x)) | |
elif nargs < 0: | |
if spos is not None: | |
raise TypeError('Cannot have two nargs < 0') | |
spos = len(rv) | |
rv.append(None) | |
# spos is the position of the wildcard (star). If it's not `None`, | |
# we fill it with the remainder. | |
if spos is not None: | |
rv[spos] = tuple(args) | |
args = [] | |
rv[spos + 1:] = reversed(rv[spos + 1:]) | |
return tuple(rv), list(args) | |
def _error_opt_args(nargs, opt):
    # Raise a usage error describing how many values *opt* still needs.
    if nargs == 1:
        message = '%s option requires an argument' % opt
    else:
        message = '%s option requires %d arguments' % (opt, nargs)
    raise BadOptionUsage(message)
def split_opt(opt):
    """Split an option string into its ``(prefix, name)`` parts."""
    head = opt[:1]
    if head.isalnum():
        # No prefix character at all; the whole string is the name.
        return '', opt
    # A doubled prefix character ("--foo") forms a two-character prefix.
    prefix = opt[:2] if opt[1:2] == head else head
    return prefix, opt[len(prefix):]
def normalize_opt(opt, ctx):
    """Run the context's token_normalize_func over the name part of *opt*."""
    if ctx is None or ctx.token_normalize_func is None:
        return opt
    prefix, name = split_opt(opt)
    return prefix + ctx.token_normalize_func(name)
def split_arg_string(string):
    """Given an argument string this attempts to split it into small parts."""
    pattern = (r"('([^'\\]*(?:\\.[^'\\]*)*)'"
               r'|"([^"\\]*(?:\\.[^"\\]*)*)"'
               r'|\S+)\s*')
    parts = []
    for match in re.finditer(pattern, string, re.S):
        token = match.group().strip()
        quote = token[:1]
        if quote in '"\'' and token[-1:] == quote:
            # Strip the quotes and resolve backslash escapes.
            token = token[1:-1].encode('ascii', 'backslashreplace') \
                .decode('unicode-escape')
            try:
                token = type(string)(token)
            except UnicodeError:
                pass
        parts.append(token)
    return parts
class Option(object):
    """Internal representation of an option for the parser.

    Splits the given option strings into short/long buckets, records the
    prefix characters in use, and applies the configured action when the
    parser feeds it a value.
    """

    def __init__(self, opts, dest, action=None, nargs=1, const=None, obj=None):
        self._short_opts = []
        self._long_opts = []
        self.prefixes = set()

        for opt in opts:
            prefix, value = split_opt(opt)
            if not prefix:
                raise ValueError('Invalid start character for option (%s)'
                                 % opt)
            # The single leading character is always registered.
            self.prefixes.add(prefix[0])
            if len(prefix) == 1 and len(value) == 1:
                self._short_opts.append(opt)
            else:
                self._long_opts.append(opt)
                # Long options also register the full prefix (e.g. '--').
                self.prefixes.add(prefix)

        if action is None:
            action = 'store'

        self.dest = dest
        self.action = action
        self.nargs = nargs
        self.const = const
        self.obj = obj

    @property
    def takes_value(self):
        # Only 'store' and 'append' consume a value from the arguments.
        return self.action in ('store', 'append')

    def process(self, value, state):
        # Apply this option's action to the parsing state.
        if self.action == 'store':
            state.opts[self.dest] = value
        elif self.action == 'store_const':
            state.opts[self.dest] = self.const
        elif self.action == 'append':
            state.opts.setdefault(self.dest, []).append(value)
        elif self.action == 'append_const':
            state.opts.setdefault(self.dest, []).append(self.const)
        elif self.action == 'count':
            state.opts[self.dest] = state.opts.get(self.dest, 0) + 1
        else:
            raise ValueError('unknown action %r' % self.action)
        state.order.append(self.obj)
class Argument(object): | |
def __init__(self, dest, nargs=1, obj=None): | |
self.dest = dest | |
self.nargs = nargs | |
self.obj = obj | |
def process(self, value, state): | |
if self.nargs > 1: | |
holes = sum(1 for x in value if x is None) | |
if holes == len(value): | |
value = None | |
elif holes != 0: | |
raise BadArgumentUsage('argument %s takes %d values' | |
% (self.dest, self.nargs)) | |
state.opts[self.dest] = value | |
state.order.append(self.obj) | |
class ParsingState(object):
    """Mutable state shared by the parser while processing arguments."""

    def __init__(self, rargs):
        # Parsed option values keyed by destination name.
        self.opts = {}
        # Leftover (largs) and still-to-process (rargs) arguments.
        self.largs = []
        self.rargs = rargs
        # `obj` markers of options/arguments in the order they appeared.
        self.order = []
class OptionParser(object):
    """The option parser is an internal class that is ultimately used to
    parse options and arguments.  It's modelled after optparse and brings
    a similar but vastly simplified API.  It should generally not be used
    directly as the high level Click classes wrap it for you.

    It's not nearly as extensible as optparse or argparse as it does not
    implement features that are implemented on a higher level (such as
    types or defaults).

    :param ctx: optionally the :class:`~click.Context` where this parser
                should go with.
    """

    def __init__(self, ctx=None):
        #: The :class:`~click.Context` for this parser.  This might be
        #: `None` for some advanced use cases.
        self.ctx = ctx
        #: This controls how the parser deals with interspersed arguments.
        #: If this is set to `False`, the parser will stop on the first
        #: non-option.  Click uses this to implement nested subcommands
        #: safely.
        self.allow_interspersed_args = True
        #: This tells the parser how to deal with unknown options.  By
        #: default it will error out (which is sensible), but there is a
        #: second mode where it will ignore it and continue processing
        #: after shifting all the unknown options into the resulting args.
        self.ignore_unknown_options = False
        if ctx is not None:
            self.allow_interspersed_args = ctx.allow_interspersed_args
            self.ignore_unknown_options = ctx.ignore_unknown_options
        self._short_opt = {}
        self._long_opt = {}
        self._opt_prefixes = set(['-', '--'])
        self._args = []

    def add_option(self, opts, dest, action=None, nargs=1, const=None,
                   obj=None):
        """Adds a new option named `dest` to the parser.  The destination
        is not inferred (unlike with optparse) and needs to be explicitly
        provided.  Action can be any of ``store``, ``store_const``,
        ``append``, ``append_const`` or ``count``.

        The `obj` can be used to identify the option in the order list
        that is returned from the parser.
        """
        if obj is None:
            obj = dest
        opts = [normalize_opt(opt, self.ctx) for opt in opts]
        option = Option(opts, dest, action=action, nargs=nargs,
                        const=const, obj=obj)
        self._opt_prefixes.update(option.prefixes)
        for opt in option._short_opts:
            self._short_opt[opt] = option
        for opt in option._long_opts:
            self._long_opt[opt] = option

    def add_argument(self, dest, nargs=1, obj=None):
        """Adds a positional argument named `dest` to the parser.

        The `obj` can be used to identify the option in the order list
        that is returned from the parser.
        """
        if obj is None:
            obj = dest
        self._args.append(Argument(dest=dest, nargs=nargs, obj=obj))

    def parse_args(self, args):
        """Parses positional arguments and returns ``(values, args, order)``
        for the parsed options and arguments as well as the leftover
        arguments if there are any.  The order is a list of objects as they
        appear on the command line.  If arguments appear multiple times they
        will be memorized multiple times as well.
        """
        state = ParsingState(args)
        try:
            self._process_args_for_options(state)
            self._process_args_for_args(state)
        except UsageError:
            # With resilient parsing enabled, usage errors are swallowed
            # so callers can still inspect the partially-parsed state.
            if self.ctx is None or not self.ctx.resilient_parsing:
                raise
        return state.opts, state.largs, state.order

    def _process_args_for_args(self, state):
        # Distribute whatever is left over the declared positional
        # arguments according to their nargs specs.
        pargs, args = _unpack_args(state.largs + state.rargs,
                                   [x.nargs for x in self._args])

        for idx, arg in enumerate(self._args):
            arg.process(pargs[idx], state)

        state.largs = args
        state.rargs = []

    def _process_args_for_options(self, state):
        # Pull arguments off the front, dispatching anything that looks
        # like an option into the option-matching code.
        while state.rargs:
            arg = state.rargs.pop(0)
            arglen = len(arg)
            # Double dashes always handled explicitly regardless of what
            # prefixes are valid.
            if arg == '--':
                return
            elif arg[:1] in self._opt_prefixes and arglen > 1:
                self._process_opts(arg, state)
            elif self.allow_interspersed_args:
                state.largs.append(arg)
            else:
                state.rargs.insert(0, arg)
                return

        # Say this is the original argument list:
        # [arg0, arg1, ..., arg(i-1), arg(i), arg(i+1), ..., arg(N-1)]
        #                             ^
        # (we are about to process arg(i)).
        #
        # Then rargs is [arg(i), ..., arg(N-1)] and largs is a *subset* of
        # [arg0, ..., arg(i-1)] (any options and their arguments will have
        # been removed from largs).
        #
        # The while loop will usually consume 1 or more arguments per pass.
        # If it consumes 1 (eg. arg is an option that takes no arguments),
        # then after _process_arg() is done the situation is:
        #
        #   largs = subset of [arg0, ..., arg(i)]
        #   rargs = [arg(i+1), ..., arg(N-1)]
        #
        # If allow_interspersed_args is false, largs will always be
        # *empty* -- still a subset of [arg0, ..., arg(i-1)], but
        # not a very interesting subset!

    def _match_long_opt(self, opt, explicit_value, state):
        # Look up a long option and consume its value(s) if it takes any.
        if opt not in self._long_opt:
            possibilities = [word for word in self._long_opt
                             if word.startswith(opt)]
            raise NoSuchOption(opt, possibilities=possibilities)

        option = self._long_opt[opt]
        if option.takes_value:
            # At this point it's safe to modify rargs by injecting the
            # explicit value, because no exception is raised in this
            # branch.  This means that the inserted value will be fully
            # consumed.
            if explicit_value is not None:
                state.rargs.insert(0, explicit_value)

            nargs = option.nargs
            if len(state.rargs) < nargs:
                _error_opt_args(nargs, opt)
            elif nargs == 1:
                value = state.rargs.pop(0)
            else:
                value = tuple(state.rargs[:nargs])
                del state.rargs[:nargs]

        elif explicit_value is not None:
            raise BadOptionUsage('%s option does not take a value' % opt)

        else:
            value = None

        option.process(value, state)

    def _match_short_opt(self, arg, state):
        # Walk a cluster of short options ("-abc"), processing each
        # character as its own option.
        stop = False
        i = 1
        prefix = arg[0]
        unknown_options = []

        for ch in arg[1:]:
            opt = normalize_opt(prefix + ch, self.ctx)
            option = self._short_opt.get(opt)
            i += 1

            if not option:
                if self.ignore_unknown_options:
                    unknown_options.append(ch)
                    continue
                raise NoSuchOption(opt)
            if option.takes_value:
                # Any characters left in arg?  Pretend they're the
                # next arg, and stop consuming characters of arg.
                if i < len(arg):
                    state.rargs.insert(0, arg[i:])
                    stop = True

                nargs = option.nargs
                if len(state.rargs) < nargs:
                    _error_opt_args(nargs, opt)
                elif nargs == 1:
                    value = state.rargs.pop(0)
                else:
                    value = tuple(state.rargs[:nargs])
                    del state.rargs[:nargs]

            else:
                value = None

            option.process(value, state)

            if stop:
                break

        # If we got any unknown options we re-combinate the string of the
        # remaining options and re-attach the prefix, then report that
        # to the state as new larg.  This way there is basic combinatorics
        # that can be achieved while still ignoring unknown arguments.
        if self.ignore_unknown_options and unknown_options:
            state.largs.append(prefix + ''.join(unknown_options))

    def _process_opts(self, arg, state):
        explicit_value = None
        # Long option handling happens in two parts.  The first part is
        # supporting explicitly attached values.  In any case, we will try
        # to long match the option first.
        if '=' in arg:
            long_opt, explicit_value = arg.split('=', 1)
        else:
            long_opt = arg
        norm_long_opt = normalize_opt(long_opt, self.ctx)

        # At this point we will match the (assumed) long option through
        # the long option matching code.  Note that this allows options
        # like "-foo" to be matched as long options.
        try:
            self._match_long_opt(norm_long_opt, explicit_value, state)
        except NoSuchOption:
            # At this point the long option matching failed, and we need
            # to try with short options.  However there is a special rule
            # which says, that if we have a two character options prefix
            # (applies to "--foo" for instance), we do not dispatch to the
            # short option code and will instead raise the no option
            # error.
            if arg[:2] not in self._opt_prefixes:
                return self._match_short_opt(arg, state)
            if not self.ignore_unknown_options:
                raise
            state.largs.append(arg)
import os | |
import sys | |
import struct | |
from ._compat import raw_input, text_type, string_types, \ | |
isatty, strip_ansi, get_winterm_size, DEFAULT_COLUMNS, WIN | |
from .utils import echo | |
from .exceptions import Abort, UsageError | |
from .types import convert_type | |
from .globals import resolve_color_default | |
# The prompt functions to use.  The doc tools currently override these
# functions to customize how they work.
visible_prompt_func = raw_input

# The eight standard ANSI color names plus 'reset', and the escape
# sequence that resets all terminal attributes.
_ansi_colors = ('black', 'red', 'green', 'yellow', 'blue', 'magenta',
                'cyan', 'white', 'reset')
_ansi_reset_all = '\033[0m'
def hidden_prompt_func(prompt):
    # Read input without echoing it (used by prompt() when hide_input is set).
    import getpass
    return getpass.getpass(prompt)
def _build_prompt(text, suffix, show_default=False, default=None): | |
prompt = text | |
if default is not None and show_default: | |
prompt = '%s [%s]' % (prompt, default) | |
return prompt + suffix | |
def prompt(text, default=None, hide_input=False,
           confirmation_prompt=False, type=None,
           value_proc=None, prompt_suffix=': ',
           show_default=True, err=False):
    """Prompts a user for input.  This is a convenience function that can
    be used to prompt a user for input.

    If the user aborts the input by sending a interrupt signal, this
    function will catch it and raise a :exc:`Abort` exception.

    .. versionadded:: 6.0
       Added unicode support for cmd.exe on Windows.

    .. versionadded:: 4.0
       Added the `err` parameter.

    :param text: the text to show for the prompt.
    :param default: the default value to use if no input happens.  If this
                    is not given it will prompt until it's aborted.
    :param hide_input: if this is set to true then the input value will
                       be hidden.
    :param confirmation_prompt: asks for confirmation for the value.
    :param type: the type to use to check the value against.
    :param value_proc: if this parameter is provided it's a function that
                       is invoked instead of the type conversion to
                       convert a value.
    :param prompt_suffix: a suffix that should be added to the prompt.
    :param show_default: shows or hides the default value in the prompt.
    :param err: if set to true the file defaults to ``stderr`` instead of
                ``stdout``, the same as with echo.
    """
    result = None

    def prompt_func(text):
        # Hidden prompts go through getpass, visible ones through input().
        f = hide_input and hidden_prompt_func or visible_prompt_func
        try:
            # Write the prompt separately so that we get nice
            # coloring through colorama on Windows
            echo(text, nl=False, err=err)
            return f('')
        except (KeyboardInterrupt, EOFError):
            # getpass doesn't print a newline if the user aborts input with ^C.
            # Allegedly this behavior is inherited from getpass(3).
            # A doc bug has been filed at https://bugs.python.org/issue24711
            if hide_input:
                echo(None, err=err)
            raise Abort()

    if value_proc is None:
        value_proc = convert_type(type, default)

    prompt = _build_prompt(text, prompt_suffix, show_default, default)

    while 1:
        # Inner loop: read until the user types something, or return the
        # default on empty input.
        while 1:
            value = prompt_func(prompt)
            if value:
                break
            # If a default is set and used, then the confirmation
            # prompt is always skipped because that's the only thing
            # that really makes sense.
            elif default is not None:
                return default
        try:
            result = value_proc(value)
        except UsageError as e:
            # Conversion failed; report and re-prompt.
            echo('Error: %s' % e.message, err=err)
            continue
        if not confirmation_prompt:
            return result
        # Ask again and only accept when both entries match.
        while 1:
            value2 = prompt_func('Repeat for confirmation: ')
            if value2:
                break
        if value == value2:
            return result
        echo('Error: the two entered values do not match', err=err)
def confirm(text, default=False, abort=False, prompt_suffix=': ',
            show_default=True, err=False):
    """Prompts for confirmation (yes/no question).

    If the user aborts the input by sending a interrupt signal this
    function will catch it and raise a :exc:`Abort` exception.

    .. versionadded:: 4.0
       Added the `err` parameter.

    :param text: the question to ask.
    :param default: the default for the prompt.
    :param abort: if this is set to `True` a negative answer causes the
                  function to raise :exc:`Abort` instead of returning.
    :param prompt_suffix: a suffix that should be added to the prompt.
    :param show_default: shows or hides the default value in the prompt.
    :param err: if set to true the file defaults to ``stderr`` instead of
                ``stdout``, the same as with echo.
    """
    # The capitalized letter in 'Y/n'/'y/N' marks the default answer.
    prompt = _build_prompt(text, prompt_suffix, show_default,
                           default and 'Y/n' or 'y/N')
    while 1:
        try:
            # Write the prompt separately so that we get nice
            # coloring through colorama on Windows
            echo(prompt, nl=False, err=err)
            value = visible_prompt_func('').lower().strip()
        except (KeyboardInterrupt, EOFError):
            raise Abort()
        if value in ('y', 'yes'):
            rv = True
        elif value in ('n', 'no'):
            rv = False
        elif value == '':
            # Empty input selects the default answer.
            rv = default
        else:
            echo('Error: invalid input', err=err)
            continue
        break
    if abort and not rv:
        raise Abort()
    return rv
def get_terminal_size():
    """Returns the current size of the terminal as tuple in the form
    ``(width, height)`` in columns and rows.
    """
    # If shutil has get_terminal_size() (Python 3.3 and later) use that
    if sys.version_info >= (3, 3):
        import shutil
        shutil_get_terminal_size = getattr(shutil, 'get_terminal_size', None)
        if shutil_get_terminal_size:
            sz = shutil_get_terminal_size()
            return sz.columns, sz.lines

    # On Windows, fall back to the winterm helper if available.
    if get_winterm_size is not None:
        return get_winterm_size()

    def ioctl_gwinsz(fd):
        # Ask the kernel for the window size of the given tty fd;
        # returns None (implicitly) on any failure.
        try:
            import fcntl
            import termios
            cr = struct.unpack(
                'hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234'))
        except Exception:
            return
        return cr

    # Try stdin/stdout/stderr first, then the controlling terminal.
    cr = ioctl_gwinsz(0) or ioctl_gwinsz(1) or ioctl_gwinsz(2)
    if not cr:
        try:
            fd = os.open(os.ctermid(), os.O_RDONLY)
            try:
                cr = ioctl_gwinsz(fd)
            finally:
                os.close(fd)
        except Exception:
            pass
    if not cr or not cr[0] or not cr[1]:
        # Fall back to the environment, then to hard-coded defaults.
        cr = (os.environ.get('LINES', 25),
              os.environ.get('COLUMNS', DEFAULT_COLUMNS))
    # `cr` is (rows, columns); the public contract is (width, height).
    return int(cr[1]), int(cr[0])
def echo_via_pager(text, color=None):
    """This function takes a text and shows it via an environment specific
    pager on stdout.

    .. versionchanged:: 3.0
       Added the `color` flag.

    :param text: the text to page.
    :param color: controls if the pager supports ANSI colors or not.  The
                  default is autodetection.
    """
    from ._termui_impl import pager
    color = resolve_color_default(color)
    # Coerce non-string payloads into text first.
    if isinstance(text, string_types):
        payload = text
    else:
        payload = text_type(text)
    return pager(payload + '\n', color)
def progressbar(iterable=None, length=None, label=None, show_eta=True,
                show_percent=None, show_pos=False,
                item_show_func=None, fill_char='#', empty_char='-',
                bar_template='%(label)s [%(bar)s] %(info)s',
                info_sep=' ', width=36, file=None, color=None):
    """This function creates an iterable context manager that can be used
    to iterate over something while showing a progress bar.  It will
    either iterate over the `iterable` or `length` items (that are counted
    up).  While iteration happens, this function will print a rendered
    progress bar to the given `file` (defaults to stdout) and will attempt
    to calculate remaining time and more.  By default, this progress bar
    will not be rendered if the file is not a terminal.

    The context manager creates the progress bar.  When the context
    manager is entered the progress bar is already displayed.  With every
    iteration over the progress bar, the iterable passed to the bar is
    advanced and the bar is updated.  When the context manager exits,
    a newline is printed and the progress bar is finalized on screen.

    No printing must happen or the progress bar will be unintentionally
    destroyed.

    Example usage::

        with progressbar(items) as bar:
            for item in bar:
                do_something_with(item)

    Alternatively, if no iterable is specified, one can manually update the
    progress bar through the `update()` method instead of directly
    iterating over the progress bar.  The update method accepts the number
    of steps to increment the bar with::

        with progressbar(length=chunks.total_bytes) as bar:
            for chunk in chunks:
                process_chunk(chunk)
                bar.update(chunks.bytes)

    .. versionadded:: 2.0

    .. versionadded:: 4.0
       Added the `color` parameter.  Added a `update` method to the
       progressbar object.

    :param iterable: an iterable to iterate over.  If not provided the length
                     is required.
    :param length: the number of items to iterate over.  By default the
                   progressbar will attempt to ask the iterator about its
                   length, which might or might not work.  If an iterable is
                   also provided this parameter can be used to override the
                   length.  If an iterable is not provided the progress bar
                   will iterate over a range of that length.
    :param label: the label to show next to the progress bar.
    :param show_eta: enables or disables the estimated time display.  This is
                     automatically disabled if the length cannot be
                     determined.
    :param show_percent: enables or disables the percentage display.  The
                         default is `True` if the iterable has a length or
                         `False` if not.
    :param show_pos: enables or disables the absolute position display.  The
                     default is `False`.
    :param item_show_func: a function called with the current item which
                           can return a string to show the current item
                           next to the progress bar.  Note that the current
                           item can be `None`!
    :param fill_char: the character to use to show the filled part of the
                      progress bar.
    :param empty_char: the character to use to show the non-filled part of
                       the progress bar.
    :param bar_template: the format string to use as template for the bar.
                         The parameters in it are ``label`` for the label,
                         ``bar`` for the progress bar and ``info`` for the
                         info section.
    :param info_sep: the separator between multiple info items (eta etc.)
    :param width: the width of the progress bar in characters, 0 means full
                  terminal width
    :param file: the file to write to.  If this is not a terminal then
                 only the label is printed.
    :param color: controls if the terminal supports ANSI colors or not.  The
                  default is autodetection.  This is only needed if ANSI
                  codes are included anywhere in the progress bar output
                  which is not the case by default.
    """
    from ._termui_impl import ProgressBar
    return ProgressBar(iterable=iterable,
                       length=length,
                       label=label,
                       show_eta=show_eta,
                       show_percent=show_percent,
                       show_pos=show_pos,
                       item_show_func=item_show_func,
                       fill_char=fill_char,
                       empty_char=empty_char,
                       bar_template=bar_template,
                       info_sep=info_sep,
                       width=width,
                       file=file,
                       color=resolve_color_default(color))
def clear():
    """Clears the terminal screen.  This will have the effect of clearing
    the whole visible space of the terminal and moving the cursor to the
    top left.  This does not do anything if not connected to a terminal.

    .. versionadded:: 2.0
    """
    if not isatty(sys.stdout):
        return
    if WIN:
        # On Windows (potentially without colorama) the reliable way to
        # clear the screen is to shell out to ``cls``.
        os.system('cls')
    else:
        # ANSI: ED (erase display) followed by CUP (cursor to row 1,
        # column 1).
        sys.stdout.write('\033[2J\033[1;1H')
def style(text, fg=None, bg=None, bold=None, dim=None, underline=None,
          blink=None, reverse=None, reset=True):
    """Styles a text with ANSI styles and returns the new string.  By
    default the styling is self contained which means that at the end
    of the string a reset code is issued.  This can be prevented by
    passing ``reset=False``.

    Examples::

        click.echo(click.style('Hello World!', fg='green'))
        click.echo(click.style('ATTENTION!', blink=True))
        click.echo(click.style('Some things', reverse=True, fg='cyan'))

    Supported color names:

    * ``black`` (might be a gray)
    * ``red``
    * ``green``
    * ``yellow`` (might be an orange)
    * ``blue``
    * ``magenta``
    * ``cyan``
    * ``white`` (might be light gray)
    * ``reset`` (reset the color code only)

    .. versionadded:: 2.0

    :param text: the string to style with ansi codes.
    :param fg: if provided this will become the foreground color.
    :param bg: if provided this will become the background color.
    :param bold: if provided this will enable or disable bold mode.
    :param dim: if provided this will enable or disable dim mode.  This is
                badly supported.
    :param underline: if provided this will enable or disable underline.
    :param blink: if provided this will enable or disable blinking.
    :param reverse: if provided this will enable or disable inverse
                    rendering (foreground becomes background and the
                    other way round).
    :param reset: by default a reset-all code is added at the end of the
                  string which means that styles do not carry over.  This
                  can be disabled to compose styles.
    """
    parts = []

    def _color_code(name, base):
        # SGR color codes are the color's index offset by 30 (foreground)
        # or 40 (background).
        try:
            return '\033[%dm' % (base + _ansi_colors.index(name))
        except ValueError:
            raise TypeError('Unknown color %r' % name)

    if fg:
        parts.append(_color_code(fg, 30))
    if bg:
        parts.append(_color_code(bg, 40))
    # Each tri-state flag maps to an (enable, disable) SGR code pair;
    # ``None`` means "leave untouched".
    for flag, on_code, off_code in ((bold, 1, 22), (dim, 2, 22),
                                    (underline, 4, 24), (blink, 5, 25),
                                    (reverse, 7, 27)):
        if flag is not None:
            parts.append('\033[%dm' % (on_code if flag else off_code))
    parts.append(text)
    if reset:
        parts.append(_ansi_reset_all)
    return ''.join(parts)
def unstyle(text):
    """Removes ANSI styling information from a string.  Usually it's not
    necessary to use this function as Click's echo function will
    automatically remove styling if necessary.

    .. versionadded:: 2.0

    :param text: the text to remove style information from.
    :return: the text with all ANSI escape sequences stripped.
    """
    return strip_ansi(text)
def secho(text, file=None, nl=True, err=False, color=None, **styles):
    """This function combines :func:`echo` and :func:`style` into one
    call.  As such the following two calls are the same::

        click.secho('Hello World!', fg='green')
        click.echo(click.style('Hello World!', fg='green'))

    All keyword arguments are forwarded to the underlying functions
    depending on which one they go with.

    .. versionadded:: 2.0
    """
    # Style first, then emit through echo with its own keyword set.
    styled = style(text, **styles)
    return echo(styled, file=file, nl=nl, err=err, color=color)
def edit(text=None, editor=None, env=None, require_save=True,
         extension='.txt', filename=None):
    r"""Edits the given text in the defined editor.  If an editor is given
    (should be the full path to the executable but the regular operating
    system search path is used for finding the executable) it overrides
    the detected editor.  Optionally, some environment variables can be
    used.  If the editor is closed without changes, `None` is returned.  In
    case a file is edited directly the return value is always `None` and
    `require_save` and `extension` are ignored.

    If the editor cannot be opened a :exc:`UsageError` is raised.

    Note for Windows: to simplify cross-platform usage, the newlines are
    automatically converted from POSIX to Windows and vice versa.  As such,
    the message here will have ``\n`` as newline markers.

    :param text: the text to edit.
    :param editor: optionally the editor to use.  Defaults to automatic
                   detection.
    :param env: environment variables to forward to the editor.
    :param require_save: if this is true, then not saving in the editor
                         will make the return value become `None`.
    :param extension: the extension to tell the editor about.  This defaults
                      to `.txt` but changing this might change syntax
                      highlighting.
    :param filename: if provided it will edit this file instead of the
                     provided text contents.  It will not use a temporary
                     file as an indirection in that case.
    """
    from ._termui_impl import Editor
    ed = Editor(editor=editor, env=env, require_save=require_save,
                extension=extension)
    if filename is not None:
        # Direct file editing never returns contents.
        ed.edit_file(filename)
        return None
    return ed.edit(text)
def launch(url, wait=False, locate=False):
    """This function launches the given URL (or filename) in the default
    viewer application for this file type.  If this is an executable, it
    might launch the executable in a new session.  The return value is
    the exit code of the launched application.  Usually, ``0`` indicates
    success.

    Examples::

        click.launch('http://click.pocoo.org/')
        click.launch('/my/downloaded/file', locate=True)

    .. versionadded:: 2.0

    :param url: URL or filename of the thing to launch.
    :param wait: waits for the program to stop.
    :param locate: if this is set to `True` then instead of launching the
                   application associated with the URL it will attempt to
                   launch a file manager with the file located.  This
                   might have weird effects if the URL does not point to
                   the filesystem.
    :return: the exit code of the launched application.
    """
    # The platform-specific heavy lifting lives in _termui_impl.
    from ._termui_impl import open_url
    return open_url(url, wait=wait, locate=locate)
# If this is provided, getchar() calls into this instead.  This is used
# for unittesting purposes: the test runner installs a scripted reader
# here so character input can be simulated.
_getchar = None
def getchar(echo=False):
    """Fetches a single character from the terminal and returns it.  This
    will always return a unicode character and under certain rare
    circumstances this might return more than one character.  The
    situations which more than one character is returned is when for
    whatever reason multiple characters end up in the terminal buffer or
    standard input was not actually a terminal.

    Note that this will always read from the terminal, even if something
    is piped into the standard input.

    .. versionadded:: 2.0

    :param echo: if set to `True`, the character read will also show up on
                 the terminal.  The default is to not show it.
    """
    # Tests may have installed a replacement reader via the module-level
    # ``_getchar`` hook; otherwise fall back to the platform version.
    func = _getchar
    if func is None:
        from ._termui_impl import getchar as func
    return func(echo)
def pause(info='Press any key to continue ...', err=False):
    """This command stops execution and waits for the user to press any
    key to continue.  This is similar to the Windows batch "pause"
    command.  If the program is not run through a terminal, this command
    will instead do nothing.

    .. versionadded:: 2.0

    .. versionadded:: 4.0
       Added the `err` parameter.

    :param info: the info string to print before pausing.
    :param err: if set to message goes to ``stderr`` instead of
                ``stdout``, the same as with echo.
    """
    # Only meaningful when both stdin and stdout are real terminals.
    if not (isatty(sys.stdin) and isatty(sys.stdout)):
        return
    try:
        if info:
            echo(info, nl=False, err=err)
        try:
            getchar()
        except (KeyboardInterrupt, EOFError):
            # An interrupted pause still just continues.
            pass
    finally:
        # Terminate the info line even when interrupted.
        if info:
            echo(err=err)
import os | |
import sys | |
import shutil | |
import tempfile | |
import contextlib | |
from ._compat import iteritems, PY2 | |
# If someone wants to vendor click, we want to ensure the
# correct package is discovered.  Ideally we could use a
# relative import here but unfortunately Python does not
# support that.
clickpkg = sys.modules[__name__.rsplit('.', 1)[0]]

# Output capturing differs by Python version: Python 2 captures into a
# cStringIO buffer directly, while Python 3 uses BytesIO plus a helper
# to find the binary reader behind a text stream.
if PY2:
    from cStringIO import StringIO
else:
    import io
    from ._compat import _find_binary_reader
class EchoingStdin(object):
    """Wraps an input stream so that everything read from it is also
    written to an output stream.  This makes scripted stdin show up in
    the captured output, mimicking an interactive session.
    """

    def __init__(self, input, output):
        self._input = input
        self._output = output

    def __getattr__(self, x):
        # Anything not explicitly overridden is delegated to the
        # underlying input stream.
        return getattr(self._input, x)

    def _echo(self, rv):
        # Mirror the data that was just read into the output stream.
        self._output.write(rv)
        return rv

    def read(self, n=-1):
        return self._echo(self._input.read(n))

    def readline(self, n=-1):
        return self._echo(self._input.readline(n))

    def readlines(self):
        return [self._echo(line) for line in self._input.readlines()]

    def __iter__(self):
        return iter(self._echo(line) for line in self._input)

    def __repr__(self):
        return repr(self._input)
def make_input_stream(input, charset):
    """Coerces ``input`` (None, text, bytes or a readable stream) into a
    binary stream usable as stdin for an invoked command."""
    if hasattr(input, 'read'):
        # Already a stream.  On Python 3 we need the binary buffer
        # behind a text stream.
        if PY2:
            return input
        reader = _find_binary_reader(input)
        if reader is None:
            raise TypeError('Could not find binary reader for input stream.')
        return reader
    if input is None:
        data = b''
    elif isinstance(input, bytes):
        data = input
    else:
        data = input.encode(charset)
    return StringIO(data) if PY2 else io.BytesIO(data)
class Result(object):
    """Holds the captured result of an invoked CLI script."""

    def __init__(self, runner, output_bytes, exit_code, exception,
                 exc_info=None):
        #: The runner that created the result
        self.runner = runner
        #: The output as bytes.
        self.output_bytes = output_bytes
        #: The exit code as integer.
        self.exit_code = exit_code
        #: The exception that happened if one did.
        self.exception = exception
        #: The traceback
        self.exc_info = exc_info

    @property
    def output(self):
        """The output as unicode string."""
        # Decode with the runner's charset and normalize Windows line
        # endings so assertions are platform independent.
        decoded = self.output_bytes.decode(self.runner.charset, 'replace')
        return decoded.replace('\r\n', '\n')

    def __repr__(self):
        status = repr(self.exception) if self.exception else 'okay'
        return '<Result %s>' % (status,)
class CliRunner(object):
    """The CLI runner provides functionality to invoke a Click command line
    script for unittesting purposes in a isolated environment.  This only
    works in single-threaded systems without any concurrency as it changes the
    global interpreter state.

    :param charset: the character set for the input and output data.  This is
                    UTF-8 by default and should not be changed currently as
                    the reporting to Click only works in Python 2 properly.
    :param env: a dictionary with environment variables for overriding.
    :param echo_stdin: if this is set to `True`, then reading from stdin writes
                       to stdout.  This is useful for showing examples in
                       some circumstances.  Note that regular prompts
                       will automatically echo the input.
    """

    def __init__(self, charset=None, env=None, echo_stdin=False):
        if charset is None:
            charset = 'utf-8'
        self.charset = charset
        self.env = env or {}
        self.echo_stdin = echo_stdin

    def get_default_prog_name(self, cli):
        """Given a command object it will return the default program name
        for it.  The default is the `name` attribute or ``"root"`` if not
        set.
        """
        return cli.name or 'root'

    def make_env(self, overrides=None):
        """Returns the environment overrides for invoking a script."""
        rv = dict(self.env)
        if overrides:
            rv.update(overrides)
        return rv

    @contextlib.contextmanager
    def isolation(self, input=None, env=None, color=False):
        """A context manager that sets up the isolation for invoking of a
        command line tool.  This sets up stdin with the given input data
        and `os.environ` with the overrides from the given dictionary.
        This also rebinds some internals in Click to be mocked (like the
        prompt functionality).

        This is automatically done in the :meth:`invoke` method.

        .. versionadded:: 4.0
           The ``color`` parameter was added.

        :param input: the input stream to put into sys.stdin.
        :param env: the environment overrides as dictionary.
        :param color: whether the output should contain color codes.  The
                      application can still override this explicitly.
        """
        input = make_input_stream(input, self.charset)

        # Remember the real streams so they can be restored on exit.
        old_stdin = sys.stdin
        old_stdout = sys.stdout
        old_stderr = sys.stderr
        # Pin the rendering width so help output is deterministic.
        old_forced_width = clickpkg.formatting.FORCED_WIDTH
        clickpkg.formatting.FORCED_WIDTH = 80

        env = self.make_env(env)

        if PY2:
            # Python 2: one byte buffer captures stdout and stderr.
            sys.stdout = sys.stderr = bytes_output = StringIO()
            if self.echo_stdin:
                input = EchoingStdin(input, bytes_output)
        else:
            # Python 3: capture bytes underneath, expose text wrappers
            # on top so code writing str works transparently.
            bytes_output = io.BytesIO()
            if self.echo_stdin:
                input = EchoingStdin(input, bytes_output)
            input = io.TextIOWrapper(input, encoding=self.charset)
            sys.stdout = sys.stderr = io.TextIOWrapper(
                bytes_output, encoding=self.charset)

        sys.stdin = input

        def visible_input(prompt=None):
            # Mocked visible prompt: consume one line from the scripted
            # input and echo it so it appears in the captured output.
            sys.stdout.write(prompt or '')
            val = input.readline().rstrip('\r\n')
            sys.stdout.write(val + '\n')
            sys.stdout.flush()
            return val

        def hidden_input(prompt=None):
            # Mocked hidden prompt: the consumed value is not echoed.
            sys.stdout.write((prompt or '') + '\n')
            sys.stdout.flush()
            return input.readline().rstrip('\r\n')

        def _getchar(echo):
            # Mocked single-character read.
            char = sys.stdin.read(1)
            if echo:
                sys.stdout.write(char)
                sys.stdout.flush()
            return char

        default_color = color

        def should_strip_ansi(stream=None, color=None):
            # Strip ANSI sequences unless color output was requested.
            if color is None:
                return not default_color
            return not color

        # Swap the Click-internal hooks for the mocks, remembering the
        # previous values for restoration in the ``finally`` block.
        old_visible_prompt_func = clickpkg.termui.visible_prompt_func
        old_hidden_prompt_func = clickpkg.termui.hidden_prompt_func
        old__getchar_func = clickpkg.termui._getchar
        old_should_strip_ansi = clickpkg.utils.should_strip_ansi
        clickpkg.termui.visible_prompt_func = visible_input
        clickpkg.termui.hidden_prompt_func = hidden_input
        clickpkg.termui._getchar = _getchar
        clickpkg.utils.should_strip_ansi = should_strip_ansi

        old_env = {}
        try:
            # Apply environment overrides; ``None`` in ``old_env`` marks
            # a key that was previously unset.
            for key, value in iteritems(env):
                old_env[key] = os.environ.get(key)
                if value is None:
                    try:
                        del os.environ[key]
                    except Exception:
                        pass
                else:
                    os.environ[key] = value
            yield bytes_output
        finally:
            # Restore the environment ...
            for key, value in iteritems(old_env):
                if value is None:
                    try:
                        del os.environ[key]
                    except Exception:
                        pass
                else:
                    os.environ[key] = value
            # ... the standard streams ...
            sys.stdout = old_stdout
            sys.stderr = old_stderr
            sys.stdin = old_stdin
            # ... and the Click internals that were mocked above.
            clickpkg.termui.visible_prompt_func = old_visible_prompt_func
            clickpkg.termui.hidden_prompt_func = old_hidden_prompt_func
            clickpkg.termui._getchar = old__getchar_func
            clickpkg.utils.should_strip_ansi = old_should_strip_ansi
            clickpkg.formatting.FORCED_WIDTH = old_forced_width

    def invoke(self, cli, args=None, input=None, env=None,
               catch_exceptions=True, color=False, **extra):
        """Invokes a command in an isolated environment.  The arguments are
        forwarded directly to the command line script, the `extra` keyword
        arguments are passed to the :meth:`~clickpkg.Command.main` function of
        the command.

        This returns a :class:`Result` object.

        .. versionadded:: 3.0
           The ``catch_exceptions`` parameter was added.

        .. versionchanged:: 3.0
           The result object now has an `exc_info` attribute with the
           traceback if available.

        .. versionadded:: 4.0
           The ``color`` parameter was added.

        :param cli: the command to invoke
        :param args: the arguments to invoke
        :param input: the input data for `sys.stdin`.
        :param env: the environment overrides.
        :param catch_exceptions: Whether to catch any other exceptions than
                                 ``SystemExit``.
        :param extra: the keyword arguments to pass to :meth:`main`.
        :param color: whether the output should contain color codes.  The
                      application can still override this explicitly.
        """
        exc_info = None
        with self.isolation(input=input, env=env, color=color) as out:
            exception = None
            exit_code = 0
            try:
                cli.main(args=args or (),
                         prog_name=self.get_default_prog_name(cli), **extra)
            except SystemExit as e:
                # A zero exit is success; anything else is recorded as
                # the exception for the result object.
                if e.code != 0:
                    exception = e
                exc_info = sys.exc_info()
                exit_code = e.code
                if not isinstance(exit_code, int):
                    # sys.exit() accepts arbitrary objects; mirror the
                    # interpreter by printing them and exiting with 1.
                    sys.stdout.write(str(exit_code))
                    sys.stdout.write('\n')
                    exit_code = 1
            except Exception as e:
                if not catch_exceptions:
                    raise
                exception = e
                exit_code = -1
                exc_info = sys.exc_info()
            finally:
                # Flush before reading the captured buffer so all text
                # written through the wrappers is included.
                sys.stdout.flush()
                output = out.getvalue()
        return Result(runner=self,
                      output_bytes=output,
                      exit_code=exit_code,
                      exception=exception,
                      exc_info=exc_info)

    @contextlib.contextmanager
    def isolated_filesystem(self):
        """A context manager that creates a temporary folder and changes
        the current working directory to it for isolated filesystem tests.
        """
        cwd = os.getcwd()
        t = tempfile.mkdtemp()
        os.chdir(t)
        try:
            yield t
        finally:
            os.chdir(cwd)
            try:
                # Best-effort cleanup of the temporary directory.
                shutil.rmtree(t)
            except (OSError, IOError):
                pass
import os | |
import stat | |
from ._compat import open_stream, text_type, filename_to_ui, \ | |
get_filesystem_encoding, get_streerror, _get_argv_encoding, PY2 | |
from .exceptions import BadParameter | |
from .utils import safecall, LazyFile | |
class ParamType(object):
    """Helper for converting values through types.  The following is
    necessary for a valid type:

    *   it needs a name
    *   it needs to pass through None unchanged
    *   it needs to convert from a string
    *   it needs to convert its result type through unchanged
        (eg: needs to be idempotent)
    *   it needs to be able to deal with param and context being `None`.
        This can be the case when the object is used with prompt
        inputs.
    """
    #: Composite types are expanded into multiple parameter values.
    is_composite = False

    #: the descriptive name of this type
    name = None

    #: if a list of this type is expected and the value is pulled from a
    #: string environment variable, this is what splits it up.  `None`
    #: means any whitespace.  For all parameters the general rule is that
    #: whitespace splits them up.  The exception are paths and files which
    #: are split by ``os.path.pathsep`` by default (":" on Unix and ";" on
    #: Windows).
    envvar_list_splitter = None

    def __call__(self, value, param=None, ctx=None):
        # Missing values (None) pass through without conversion.
        if value is None:
            return None
        return self.convert(value, param, ctx)

    def get_metavar(self, param):
        """Returns the metavar default for this param if it provides one."""

    def get_missing_message(self, param):
        """Optionally might return extra information about a missing
        parameter.

        .. versionadded:: 2.0
        """

    def convert(self, value, param, ctx):
        """Converts the value.  This is not invoked for values that are
        `None` (the missing value).
        """
        return value

    def split_envvar_value(self, rv):
        """Given a value from an environment variable this splits it up
        into small chunks depending on the defined envvar list splitter.

        If the splitter is set to `None`, which means that whitespace splits,
        then leading and trailing whitespace is ignored.  Otherwise, leading
        and trailing splitters usually lead to empty items being included.
        """
        return (rv or '').split(self.envvar_list_splitter)

    def fail(self, message, param=None, ctx=None):
        """Helper method to fail with an invalid value message."""
        raise BadParameter(message, ctx=ctx, param=param)
class CompositeParamType(ParamType):
    #: Composite types consume several parameter values at once.
    is_composite = True

    @property
    def arity(self):
        """The number of values this composite type consumes.  Subclasses
        must override this property."""
        raise NotImplementedError()
class FuncParamType(ParamType):
    # Adapts a plain callable into a parameter type: the type's name is
    # taken from the function's name and a ``ValueError`` raised by the
    # callable is reported as a conversion failure.

    def __init__(self, func):
        self.name = func.__name__
        self.func = func

    def convert(self, value, param, ctx):
        try:
            return self.func(value)
        except ValueError:
            try:
                # Build a text representation of the offending value for
                # the failure message.
                value = text_type(value)
            except UnicodeError:
                # Python 2 only path (str.decode does not exist on
                # Python 3 byte strings).
                value = str(value).decode('utf-8', 'replace')
            self.fail(value, param, ctx)
class UnprocessedParamType(ParamType):
    # A text type that applies no conversion at all; values are passed
    # through exactly as received.
    name = 'text'

    def convert(self, value, param, ctx):
        return value

    def __repr__(self):
        return 'UNPROCESSED'
class StringParamType(ParamType):
    """The default parameter type: passes text through unchanged and
    decodes byte input using the most plausible encodings."""
    name = 'text'

    def convert(self, value, param, ctx):
        """Return ``value`` as text.  Bytes are decoded with the argv
        encoding first, then the filesystem encoding, finally falling
        back to UTF-8 with replacement characters so that bytes input
        always yields a text result."""
        if isinstance(value, bytes):
            enc = _get_argv_encoding()
            try:
                value = value.decode(enc)
            except UnicodeError:
                fs_enc = get_filesystem_encoding()
                if fs_enc != enc:
                    try:
                        value = value.decode(fs_enc)
                    except UnicodeError:
                        value = value.decode('utf-8', 'replace')
                else:
                    # Bug fix: previously, when the filesystem encoding
                    # equaled the argv encoding and decoding failed, the
                    # raw bytes leaked through unconverted.  Always fall
                    # back to UTF-8 with replacement instead.
                    value = value.decode('utf-8', 'replace')
            return value
        return value

    def __repr__(self):
        return 'STRING'
class Choice(ParamType):
    """The choice type allows a value to be checked against a fixed set of
    supported values.  All of these values have to be strings.

    See :ref:`choice-opts` for an example.
    """
    name = 'choice'

    def __init__(self, choices):
        self.choices = choices

    def get_metavar(self, param):
        return '[%s]' % '|'.join(self.choices)

    def get_missing_message(self, param):
        return 'Choose from %s.' % ', '.join(self.choices)

    def convert(self, value, param, ctx):
        # An exact match always wins.
        if value in self.choices:
            return value

        # Otherwise retry after token normalization, if the context
        # provides a normalization function.
        normalize = ctx.token_normalize_func if ctx is not None else None
        if normalize is not None:
            value = normalize(value)
            for choice in self.choices:
                if normalize(choice) == value:
                    return choice

        self.fail('invalid choice: %s. (choose from %s)' %
                  (value, ', '.join(self.choices)), param, ctx)

    def __repr__(self):
        return 'Choice(%r)' % list(self.choices)
class IntParamType(ParamType):
    # Converts values to integers via ``int()``.
    name = 'integer'

    def convert(self, value, param, ctx):
        try:
            return int(value)
        except (ValueError, UnicodeError):
            # UnicodeError covers undecodable byte input on Python 2.
            self.fail('%s is not a valid integer' % value, param, ctx)

    def __repr__(self):
        return 'INT'
class IntRange(IntParamType):
    """A parameter that works similar to :data:`click.INT` but restricts
    the value to fit into a range.  The default behavior is to fail if the
    value falls outside the range, but it can also be silently clamped
    between the two edges.

    See :ref:`ranges` for an example.
    """
    name = 'integer range'

    def __init__(self, min=None, max=None, clamp=False):
        self.min = min
        self.max = max
        self.clamp = clamp

    def convert(self, value, param, ctx):
        # Parse as an integer first, then enforce or clamp the bounds.
        rv = IntParamType.convert(self, value, param, ctx)
        too_small = self.min is not None and rv < self.min
        too_large = self.max is not None and rv > self.max
        if self.clamp:
            if too_small:
                return self.min
            if too_large:
                return self.max
        if too_small or too_large:
            if self.min is None:
                self.fail('%s is bigger than the maximum valid value '
                          '%s.' % (rv, self.max), param, ctx)
            elif self.max is None:
                self.fail('%s is smaller than the minimum valid value '
                          '%s.' % (rv, self.min), param, ctx)
            else:
                self.fail('%s is not in the valid range of %s to %s.'
                          % (rv, self.min, self.max), param, ctx)
        return rv

    def __repr__(self):
        return 'IntRange(%r, %r)' % (self.min, self.max)
class BoolParamType(ParamType):
    """Converts common true/false strings into booleans; actual booleans
    pass through unchanged."""
    name = 'boolean'

    def convert(self, value, param, ctx):
        if isinstance(value, bool):
            return bool(value)
        normalized = value.lower()
        if normalized in ('true', '1', 'yes', 'y'):
            return True
        if normalized in ('false', '0', 'no', 'n'):
            return False
        self.fail('%s is not a valid boolean' % value, param, ctx)

    def __repr__(self):
        return 'BOOL'
class FloatParamType(ParamType):
    # Converts values to floats via ``float()``.
    name = 'float'

    def convert(self, value, param, ctx):
        try:
            return float(value)
        except (UnicodeError, ValueError):
            # UnicodeError covers undecodable byte input on Python 2.
            self.fail('%s is not a valid floating point value' %
                      value, param, ctx)

    def __repr__(self):
        return 'FLOAT'
class UUIDParameterType(ParamType):
    # Parses values into :class:`uuid.UUID` objects.
    name = 'uuid'

    def convert(self, value, param, ctx):
        import uuid
        try:
            if PY2 and isinstance(value, text_type):
                # On Python 2, unicode input is encoded to ASCII before
                # being handed to uuid.UUID.
                value = value.encode('ascii')
            return uuid.UUID(value)
        except (UnicodeError, ValueError):
            self.fail('%s is not a valid UUID value' % value, param, ctx)

    def __repr__(self):
        return 'UUID'
class File(ParamType):
    """Declares a parameter to be a file for reading or writing.  The file
    is automatically closed once the context tears down (after the command
    finished working).

    Files can be opened for reading or writing.  The special value ``-``
    indicates stdin or stdout depending on the mode.

    By default, the file is opened for reading text data, but it can also be
    opened in binary mode or for writing.  The encoding parameter can be used
    to force a specific encoding.

    The `lazy` flag controls if the file should be opened immediately or
    upon first IO.  The default is to be non lazy for standard input and
    output streams as well as files opened for reading, lazy otherwise.

    Starting with Click 2.0, files can also be opened atomically in which
    case all writes go into a separate file in the same folder and upon
    completion the file will be moved over to the original location.  This
    is useful if a file regularly read by other users is modified.

    See :ref:`file-args` for more information.
    """
    name = 'filename'
    # File lists from environment variables split on the OS path
    # separator (":" on Unix, ";" on Windows) rather than whitespace.
    envvar_list_splitter = os.path.pathsep

    def __init__(self, mode='r', encoding=None, errors='strict', lazy=None,
                 atomic=False):
        self.mode = mode
        self.encoding = encoding
        self.errors = errors
        self.lazy = lazy
        self.atomic = atomic

    def resolve_lazy_flag(self, value):
        # An explicit ``lazy`` setting always wins.  Otherwise "-"
        # (stdin/stdout) is opened eagerly and write modes are lazy, so
        # the target file is not created/truncated before first use.
        if self.lazy is not None:
            return self.lazy
        if value == '-':
            return False
        elif 'w' in self.mode:
            return True
        return False

    def convert(self, value, param, ctx):
        try:
            # Already-open file-like objects pass through unchanged.
            if hasattr(value, 'read') or hasattr(value, 'write'):
                return value

            lazy = self.resolve_lazy_flag(value)

            if lazy:
                f = LazyFile(value, self.mode, self.encoding, self.errors,
                             atomic=self.atomic)
                if ctx is not None:
                    ctx.call_on_close(f.close_intelligently)
                return f

            f, should_close = open_stream(value, self.mode,
                                          self.encoding, self.errors,
                                          atomic=self.atomic)
            # If a context is provided, we automatically close the file
            # at the end of the context execution (or flush out).  If a
            # context does not exist, it's the caller's responsibility to
            # properly close the file.  This for instance happens when the
            # type is used with prompts.
            if ctx is not None:
                if should_close:
                    ctx.call_on_close(safecall(f.close))
                else:
                    ctx.call_on_close(safecall(f.flush))
            return f
        except (IOError, OSError) as e:
            self.fail('Could not open file: %s: %s' % (
                filename_to_ui(value),
                get_streerror(e),
            ), param, ctx)
class Path(ParamType):
    """The path type is similar to the :class:`File` type but it performs
    different checks.  First of all, instead of returning an open file
    handle it returns just the filename.  Secondly, it can perform various
    basic checks about what the file or directory should be.

    .. versionchanged:: 6.0
       `allow_dash` was added.

    :param exists: if set to true, the file or directory needs to exist for
                   this value to be valid.  If this is not required and a
                   file does indeed not exist, then all further checks are
                   silently skipped.
    :param file_okay: controls if a file is a possible value.
    :param dir_okay: controls if a directory is a possible value.
    :param writable: if true, a writable check is performed.
    :param readable: if true, a readable check is performed.
    :param resolve_path: if this is true, then the path is fully resolved
                         before the value is passed onwards.  This means
                         that it's absolute and symlinks are resolved.
    :param allow_dash: If this is set to `True`, a single dash to indicate
                       standard streams is permitted.
    :param path_type: optionally a string type that should be used to
                      represent the path.  The default is `None` which
                      means the return value will be either bytes or
                      unicode depending on what makes most sense given the
                      input data Click deals with.
    """
    envvar_list_splitter = os.path.pathsep

    def __init__(self, exists=False, file_okay=True, dir_okay=True,
                 writable=False, readable=True, resolve_path=False,
                 allow_dash=False, path_type=None):
        self.exists = exists
        self.file_okay = file_okay
        self.dir_okay = dir_okay
        self.writable = writable
        self.readable = readable
        self.resolve_path = resolve_path
        self.allow_dash = allow_dash
        self.type = path_type

        # BUG FIX: these three cases are mutually exclusive and must form
        # a single if/elif/else chain.  Previously the second case was a
        # separate `if`, so its `else` also ran for a file-only Path
        # (file_okay=True, dir_okay=False) and overwrote name/path_type
        # with 'path'/'Path' instead of 'file'/'File'.
        if self.file_okay and not self.dir_okay:
            self.name = 'file'
            self.path_type = 'File'
        elif self.dir_okay and not self.file_okay:
            self.name = 'directory'
            self.path_type = 'Directory'
        else:
            self.name = 'path'
            self.path_type = 'Path'

    def coerce_path_result(self, rv):
        # Convert the result to the explicitly requested bytes/text type,
        # using the filesystem encoding, when `path_type` was given.
        if self.type is not None and not isinstance(rv, self.type):
            if self.type is text_type:
                rv = rv.decode(get_filesystem_encoding())
            else:
                rv = rv.encode(get_filesystem_encoding())
        return rv

    def convert(self, value, param, ctx):
        rv = value

        # A single dash for standard streams skips all filesystem checks.
        is_dash = self.file_okay and self.allow_dash and rv in (b'-', '-')

        if not is_dash:
            if self.resolve_path:
                rv = os.path.realpath(rv)

            try:
                st = os.stat(rv)
            except OSError:
                # Missing path is fine unless existence was required.
                if not self.exists:
                    return self.coerce_path_result(rv)
                self.fail('%s "%s" does not exist.' % (
                    self.path_type,
                    filename_to_ui(value)
                ), param, ctx)

            if not self.file_okay and stat.S_ISREG(st.st_mode):
                self.fail('%s "%s" is a file.' % (
                    self.path_type,
                    filename_to_ui(value)
                ), param, ctx)
            if not self.dir_okay and stat.S_ISDIR(st.st_mode):
                self.fail('%s "%s" is a directory.' % (
                    self.path_type,
                    filename_to_ui(value)
                ), param, ctx)
            if self.writable and not os.access(value, os.W_OK):
                self.fail('%s "%s" is not writable.' % (
                    self.path_type,
                    filename_to_ui(value)
                ), param, ctx)
            if self.readable and not os.access(value, os.R_OK):
                self.fail('%s "%s" is not readable.' % (
                    self.path_type,
                    filename_to_ui(value)
                ), param, ctx)

        return self.coerce_path_result(rv)
class Tuple(CompositeParamType):
    """Composite type for fixed-``nargs`` parameters whose items need
    different types per position.

    The default behavior of Click is to apply a type on a value directly,
    which works well except when `nargs` is a fixed count and the items
    differ in type.  This type can only be used with a fixed `nargs`; it
    can also be selected by using a Python tuple literal as a type.

    For more information see :ref:`tuple-type`.

    :param types: a list of types that should be used for the tuple items.
    """

    def __init__(self, types):
        self.types = list(map(convert_type, types))

    @property
    def name(self):
        names = [ty.name for ty in self.types]
        return "<" + " ".join(names) + ">"

    @property
    def arity(self):
        # One value is consumed per element type.
        return len(self.types)

    def convert(self, value, param, ctx):
        if len(value) != len(self.types):
            raise TypeError('It would appear that nargs is set to conflict '
                            'with the composite type arity.')
        converted = [ty(item, param, ctx)
                     for ty, item in zip(self.types, value)]
        return tuple(converted)
def convert_type(ty, default=None):
    """Converts a callable or python type into the most appropriate param
    type.
    """
    guessed = False
    if ty is None and default is not None:
        # No explicit type given: infer one from the default value.
        if isinstance(default, tuple):
            ty = tuple(type(item) for item in default)
        else:
            ty = type(default)
        guessed = True

    if isinstance(ty, tuple):
        return Tuple(ty)
    if isinstance(ty, ParamType):
        return ty
    if ty is text_type or ty is str or ty is None:
        return STRING
    if ty is int:
        return INT
    # Booleans are only okay if not guessed.  This is done because for
    # flags the default value is actually a bit of a lie in that it
    # indicates which of the flags is the one we want.  See get_default()
    # for more information.
    if ty is bool and not guessed:
        return BOOL
    if ty is float:
        return FLOAT
    if guessed:
        return STRING

    # Catch a common mistake: passing the ParamType class itself instead
    # of an instance.
    if __debug__:
        try:
            if issubclass(ty, ParamType):
                raise AssertionError('Attempted to use an uninstantiated '
                                     'parameter type (%s).' % ty)
        except TypeError:
            pass
    return FuncParamType(ty)
#: A dummy parameter type that just does nothing.  From a user's
#: perspective this appears to just be the same as `STRING` but internally
#: no string conversion takes place.  This is necessary to achieve the
#: same bytes/unicode behavior on Python 2/3 in situations where you want
#: to not convert argument types.  This is usually useful when working
#: with file paths as they can appear in bytes and unicode.
#:
#: For path related uses the :class:`Path` type is a better choice but
#: there are situations where an unprocessed type is useful which is why
#: it is provided.
#:
#: .. versionadded:: 4.0
UNPROCESSED = UnprocessedParamType()

#: A unicode string parameter type which is the implicit default.  This
#: can also be selected by using ``str`` as type.
STRING = StringParamType()

#: An integer parameter.  This can also be selected by using ``int`` as
#: type.
INT = IntParamType()

#: A floating point value parameter.  This can also be selected by using
#: ``float`` as type.
FLOAT = FloatParamType()

#: A boolean parameter.  This is the default for boolean flags.  This can
#: also be selected by using ``bool`` as a type.
BOOL = BoolParamType()

#: A UUID parameter.
UUID = UUIDParameterType()
import os | |
import sys | |
from .globals import resolve_color_default | |
from ._compat import text_type, open_stream, get_filesystem_encoding, \ | |
get_streerror, string_types, PY2, binary_streams, text_streams, \ | |
filename_to_ui, auto_wrap_for_ansi, strip_ansi, should_strip_ansi, \ | |
_default_text_stdout, _default_text_stderr, is_bytes, WIN | |
if not PY2: | |
from ._compat import _find_binary_writer | |
elif WIN: | |
from ._winconsole import _get_windows_argv, \ | |
_hash_py_argv, _initial_argv_hash | |
# Message types that echo() writes as-is without text conversion.
echo_native_types = string_types + (bytes, bytearray)
def _posixify(name): | |
return '-'.join(name.split()).lower() | |
def safecall(func):
    """Wraps a function so that it swallows exceptions."""
    def guarded(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception:
            # Deliberate best-effort call: any failure yields None.
            return None
    return guarded
def make_str(value):
    """Converts a value into a valid string."""
    if not isinstance(value, bytes):
        return text_type(value)
    try:
        return value.decode(get_filesystem_encoding())
    except UnicodeError:
        # Fall back to UTF-8 and replace undecodable bytes.
        return value.decode('utf-8', 'replace')
def make_default_short_help(help, max_length=45):
    """Derive a one-line short help from *help*: stop after the first
    word ending in a period, and truncate with ``...`` once adding the
    next word would exceed *max_length* characters.
    """
    pieces = []
    used = 0
    stop = False
    for word in help.split():
        if word.endswith('.'):
            # End of the first sentence: finish after this word.
            stop = True
        # Every word after the first also costs one joining space.
        cost = len(word) + 1 if pieces else len(word)
        if used + cost > max_length:
            pieces.append('...')
            stop = True
        else:
            if pieces:
                pieces.append(' ')
            pieces.append(word)
        if stop:
            break
        used += cost
    return ''.join(pieces)
class LazyFile(object):
    """A file-like proxy that defers actually opening the file.

    Basic sanity checks still happen eagerly: real files opened for
    reading are opened and closed once up front so that obvious errors
    (missing file, bad permissions) surface early.  This is useful for
    safely opening files for writing.
    """

    def __init__(self, filename, mode='r', encoding=None, errors='strict',
                 atomic=False):
        self.name = filename
        self.mode = mode
        self.encoding = encoding
        self.errors = errors
        self.atomic = atomic

        if filename != '-':
            if 'r' in mode:
                # Open and close the file once so that at least some
                # errors are caught early in some cases.
                open(filename, mode).close()
            self._f = None
            self.should_close = True
        else:
            # stdin/stdout are opened immediately; should_close records
            # whether closing them is our responsibility.
            self._f, self.should_close = open_stream(filename, mode,
                                                     encoding, errors)

    def __getattr__(self, name):
        # Any other attribute access forces the file open and delegates.
        return getattr(self.open(), name)

    def __repr__(self):
        if self._f is not None:
            return repr(self._f)
        return '<unopened file %r %s>' % (self.name, self.mode)

    def open(self):
        """Opens the file if it's not yet open.  This call might fail with
        a :exc:`FileError`.  Not handling this error will produce an error
        that Click shows.
        """
        if self._f is None:
            try:
                rv, self.should_close = open_stream(self.name, self.mode,
                                                    self.encoding,
                                                    self.errors,
                                                    atomic=self.atomic)
            except (IOError, OSError) as e:
                from .exceptions import FileError
                raise FileError(self.name, hint=get_streerror(e))
            self._f = rv
        return self._f

    def close(self):
        """Closes the underlying file, no matter what."""
        if self._f is not None:
            self._f.close()

    def close_intelligently(self):
        """Closes the file only if this wrapper opened it.  For instance
        this will never close stdin.
        """
        if self.should_close:
            self.close()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, tb):
        self.close_intelligently()

    def __iter__(self):
        self.open()
        return iter(self._f)
class KeepOpenFile(object):
    """Proxy that forwards everything to a wrapped file object but never
    closes it — even when used as a context manager.
    """

    def __init__(self, file):
        self._file = file

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, tb):
        # Intentionally a no-op: the wrapped stream must stay open.
        pass

    def __getattr__(self, name):
        return getattr(self._file, name)

    def __iter__(self):
        return iter(self._file)

    def __repr__(self):
        return repr(self._file)
def echo(message=None, file=None, nl=True, err=False, color=None):
    """Prints a message plus a newline to the given file or stdout.  On
    first sight, this looks like the print function, but it has improved
    support for handling Unicode and binary data that does not fail no
    matter how badly configured the system is.

    Primarily it means that you can print binary data as well as Unicode
    data on both 2.x and 3.x to the given file in the most appropriate way
    possible.  This is a very carefree function as in that it will try its
    best to not fail.  As of Click 6.0 this includes support for unicode
    output on the Windows console.

    In addition to that, if `colorama`_ is installed, the echo function will
    also support clever handling of ANSI codes.  Essentially it will then
    do the following:

    -   add transparent handling of ANSI color codes on Windows.
    -   hide ANSI codes automatically if the destination file is not a
        terminal.

    .. _colorama: http://pypi.python.org/pypi/colorama

    .. versionchanged:: 6.0
       As of Click 6.0 the echo function will properly support unicode
       output on the windows console.  Note that click does not modify
       the interpreter in any way which means that `sys.stdout` or the
       print statement or function will still not provide unicode support.

    .. versionchanged:: 2.0
       Starting with version 2.0 of Click, the echo function will work
       with colorama if it's installed.

    .. versionadded:: 3.0
       The `err` parameter was added.

    .. versionchanged:: 4.0
       Added the `color` flag.

    :param message: the message to print
    :param file: the file to write to (defaults to ``stdout``)
    :param err: if set to true the file defaults to ``stderr`` instead of
                ``stdout``.  This is faster and easier than calling
                :func:`get_text_stderr` yourself.
    :param nl: if set to `True` (the default) a newline is printed afterwards.
    :param color: controls if the terminal supports ANSI colors or not.  The
                  default is autodetection.
    """
    if file is None:
        if err:
            file = _default_text_stderr()
        else:
            file = _default_text_stdout()

    # Convert non bytes/text into the native string type.
    if message is not None and not isinstance(message, echo_native_types):
        message = text_type(message)

    if nl:
        # With no message, the newline alone still gets written.
        message = message or u''
        if isinstance(message, text_type):
            message += u'\n'
        else:
            message += b'\n'

    # If there is a message, and we're in Python 3, and the value looks
    # like bytes, we manually need to find the binary stream and write the
    # message in there.  This is done separately so that most stream
    # types will work as you would expect.  Eg: you can write to StringIO
    # for other cases.
    if message and not PY2 and is_bytes(message):
        binary_file = _find_binary_writer(file)
        if binary_file is not None:
            # Flush the text wrapper first so output order is preserved.
            file.flush()
            binary_file.write(message)
            binary_file.flush()
            return

    # ANSI-style support.  If there is no message or we are dealing with
    # bytes nothing is happening.  If we are connected to a file we want
    # to strip colors.  If we are on windows we either wrap the stream
    # to strip the color or we use the colorama support to translate the
    # ansi codes to API calls.
    if message and not is_bytes(message):
        color = resolve_color_default(color)
        if should_strip_ansi(file, color):
            message = strip_ansi(message)
        elif WIN:
            if auto_wrap_for_ansi is not None:
                file = auto_wrap_for_ansi(file)
            elif not color:
                message = strip_ansi(message)

    if message:
        file.write(message)
    file.flush()
def get_binary_stream(name):
    """Returns a system stream for byte processing.  This essentially
    returns the stream from the sys module with the given name but it
    solves some compatibility issues between different Python versions.
    Primarily this function is necessary for getting binary streams on
    Python 3.

    :param name: the name of the stream to open.  Valid names are
                 ``'stdin'``, ``'stdout'`` and ``'stderr'``
    """
    if name not in binary_streams:
        raise TypeError('Unknown standard stream %r' % name)
    return binary_streams[name]()
def get_text_stream(name, encoding=None, errors='strict'):
    """Returns a system stream for text processing.  This usually returns
    a wrapped stream around a binary stream returned from
    :func:`get_binary_stream` but it also can take shortcuts on Python 3
    for already correctly configured streams.

    :param name: the name of the stream to open.  Valid names are
                 ``'stdin'``, ``'stdout'`` and ``'stderr'``
    :param encoding: overrides the detected default encoding.
    :param errors: overrides the default error mode.
    """
    if name not in text_streams:
        raise TypeError('Unknown standard stream %r' % name)
    return text_streams[name](encoding, errors)
def open_file(filename, mode='r', encoding=None, errors='strict',
              lazy=False, atomic=False):
    """This is similar to how the :class:`File` works but for manual
    usage.  Files are opened non lazy by default.  This can open regular
    files as well as stdin/stdout if ``'-'`` is passed.

    If stdin/stdout is returned the stream is wrapped so that the context
    manager will not close the stream accidentally.  This makes it
    possible to always use the function like this without having to worry
    about accidentally closing a standard stream::

        with open_file(filename) as f:
            ...

    .. versionadded:: 3.0

    :param filename: the name of the file to open (or ``'-'`` for
                     stdin/stdout).
    :param mode: the mode in which to open the file.
    :param encoding: the encoding to use.
    :param errors: the error handling for this file.
    :param lazy: can be flipped to true to open the file lazily.
    :param atomic: in atomic mode writes go into a temporary file and it's
                   moved on close.
    """
    if lazy:
        return LazyFile(filename, mode, encoding, errors, atomic=atomic)
    stream, should_close = open_stream(filename, mode, encoding, errors,
                                       atomic=atomic)
    if should_close:
        return stream
    # Standard streams are wrapped so a ``with`` block cannot close them.
    return KeepOpenFile(stream)
def get_os_args():
    """This returns the argument part of sys.argv in the most appropriate
    form for processing.  What this means is that this return value is in
    a format that works for Click to process but does not necessarily
    correspond well to what's actually standard for the interpreter.

    On most environments the return value is ``sys.argv[1:]`` unchanged.
    However if you are on Windows and running Python 2 the return value
    will actually be a list of unicode strings instead because the
    default behavior on that platform otherwise will not be able to
    carry all possible values that sys.argv can have.

    .. versionadded:: 6.0
    """
    # We can only extract the unicode argv if sys.argv has not been
    # changed since the startup of the application.
    if PY2 and WIN and _initial_argv_hash == _hash_py_argv():
        return _get_windows_argv()
    return sys.argv[1:]
def format_filename(filename, shorten=False):
    """Formats a filename for user display.  The main purpose of this
    function is to ensure that the filename can be displayed at all.  This
    will decode the filename to unicode if necessary in a way that it will
    not fail.  Optionally, it can shorten the filename to not include the
    full path to the filename.

    :param filename: formats a filename for UI display.  This will also
                     convert the filename into unicode without failing.
    :param shorten: this optionally shortens the filename to strip off the
                    path that leads up to it.
    """
    target = os.path.basename(filename) if shorten else filename
    return filename_to_ui(target)
def get_app_dir(app_name, roaming=True, force_posix=False):
    r"""Returns the config folder for the application.  The default
    behavior is to return whatever is most appropriate for the operating
    system.

    To give you an idea, for an app called ``"Foo Bar"``, something like
    the following folders could be returned:

    Mac OS X:
      ``~/Library/Application Support/Foo Bar``
    Mac OS X (POSIX):
      ``~/.foo-bar``
    Unix:
      ``~/.config/foo-bar``
    Unix (POSIX):
      ``~/.foo-bar``
    Win XP (roaming):
      ``C:\Documents and Settings\<user>\Local Settings\Application Data\Foo Bar``
    Win XP (not roaming):
      ``C:\Documents and Settings\<user>\Application Data\Foo Bar``
    Win 7 (roaming):
      ``C:\Users\<user>\AppData\Roaming\Foo Bar``
    Win 7 (not roaming):
      ``C:\Users\<user>\AppData\Local\Foo Bar``

    .. versionadded:: 2.0

    :param app_name: the application name.  This should be properly
                     capitalized and can contain whitespace.
    :param roaming: controls if the folder should be roaming or not on
                    Windows.  Has no effect otherwise.
    :param force_posix: if this is set to `True` then on any POSIX system
                        the folder will be stored in the home folder with
                        a leading dot instead of the XDG config home or
                        darwin's application support folder.
    """
    if WIN:
        key = 'APPDATA' if roaming else 'LOCALAPPDATA'
        folder = os.environ.get(key)
        if folder is None:
            # Environment variable missing: fall back to the home folder.
            folder = os.path.expanduser('~')
        return os.path.join(folder, app_name)
    if force_posix:
        return os.path.expanduser('~/.' + _posixify(app_name))
    if sys.platform == 'darwin':
        return os.path.join(
            os.path.expanduser('~/Library/Application Support'), app_name)
    # Default: XDG config home (or ~/.config) plus the posixified name.
    return os.path.join(
        os.environ.get('XDG_CONFIG_HOME', os.path.expanduser('~/.config')),
        _posixify(app_name))
./setuptools-39.1.0-py3.7.egg | |
./pip-10.0.1-py3.7.egg |
[console_scripts] | |
flask = flask.cli:main | |
pip |
Copyright © 2010 by the Pallets team. | |
Some rights reserved. | |
Redistribution and use in source and binary forms of the software as | |
well as documentation, with or without modification, are permitted | |
provided that the following conditions are met: | |
* Redistributions of source code must retain the above copyright notice, | |
this list of conditions and the following disclaimer. | |
* Redistributions in binary form must reproduce the above copyright | |
notice, this list of conditions and the following disclaimer in the | |
documentation and/or other materials provided with the distribution. | |
* Neither the name of the copyright holder nor the names of its | |
contributors may be used to endorse or promote products derived from | |
this software without specific prior written permission. | |
THIS SOFTWARE AND DOCUMENTATION IS PROVIDED BY THE COPYRIGHT HOLDERS AND | |
CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, | |
BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND | |
FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE | |
COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, | |
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT | |
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF | |
USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON | |
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | |
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF | |
THIS SOFTWARE AND DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF | |
SUCH DAMAGE. |
Metadata-Version: 2.1 | |
Name: Flask | |
Version: 1.0.2 | |
Summary: A simple framework for building complex web applications. | |
Home-page: https://www.palletsprojects.com/p/flask/ | |
Author: Armin Ronacher | |
Author-email: armin.ronacher@active-4.com | |
Maintainer: Pallets team | |
Maintainer-email: contact@palletsprojects.com | |
License: BSD | |
Project-URL: Documentation, http://flask.pocoo.org/docs/ | |
Project-URL: Code, https://github.com/pallets/flask | |
Project-URL: Issue tracker, https://github.com/pallets/flask/issues | |
Platform: any | |
Classifier: Development Status :: 5 - Production/Stable | |
Classifier: Environment :: Web Environment | |
Classifier: Framework :: Flask | |
Classifier: Intended Audience :: Developers | |
Classifier: License :: OSI Approved :: BSD License | |
Classifier: Operating System :: OS Independent | |
Classifier: Programming Language :: Python | |
Classifier: Programming Language :: Python :: 2 | |
Classifier: Programming Language :: Python :: 2.7 | |
Classifier: Programming Language :: Python :: 3 | |
Classifier: Programming Language :: Python :: 3.4 | |
Classifier: Programming Language :: Python :: 3.5 | |
Classifier: Programming Language :: Python :: 3.6 | |
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content | |
Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Application | |
Classifier: Topic :: Software Development :: Libraries :: Application Frameworks | |
Classifier: Topic :: Software Development :: Libraries :: Python Modules | |
Provides-Extra: dev | |
Provides-Extra: docs | |
Provides-Extra: dotenv | |
Requires-Dist: Werkzeug (>=0.14) | |
Requires-Dist: Jinja2 (>=2.10) | |
Requires-Dist: itsdangerous (>=0.24) | |
Requires-Dist: click (>=5.1) | |
Provides-Extra: dev | |
Requires-Dist: pytest (>=3); extra == 'dev' | |
Requires-Dist: coverage; extra == 'dev' | |
Requires-Dist: tox; extra == 'dev' | |
Requires-Dist: sphinx; extra == 'dev' | |
Requires-Dist: pallets-sphinx-themes; extra == 'dev' | |
Requires-Dist: sphinxcontrib-log-cabinet; extra == 'dev' | |
Provides-Extra: docs | |
Requires-Dist: sphinx; extra == 'docs' | |
Requires-Dist: pallets-sphinx-themes; extra == 'docs' | |
Requires-Dist: sphinxcontrib-log-cabinet; extra == 'docs' | |
Provides-Extra: dotenv | |
Requires-Dist: python-dotenv; extra == 'dotenv' | |
Flask | |
===== | |
Flask is a lightweight `WSGI`_ web application framework. It is designed | |
to make getting started quick and easy, with the ability to scale up to | |
complex applications. It began as a simple wrapper around `Werkzeug`_ | |
and `Jinja`_ and has become one of the most popular Python web | |
application frameworks. | |
Flask offers suggestions, but doesn't enforce any dependencies or | |
project layout. It is up to the developer to choose the tools and | |
libraries they want to use. There are many extensions provided by the | |
community that make adding new functionality easy. | |
Installing | |
---------- | |
Install and update using `pip`_: | |
.. code-block:: text | |
pip install -U Flask | |
A Simple Example | |
---------------- | |
.. code-block:: python | |
from flask import Flask | |
app = Flask(__name__) | |
@app.route('/') | |
def hello(): | |
return 'Hello, World!' | |
.. code-block:: text | |
$ FLASK_APP=hello.py flask run | |
* Serving Flask app "hello" | |
* Running on http://127.0.0.1:5000/ (Press CTRL+C to quit) | |
Donate | |
------ | |
The Pallets organization develops and supports Flask and the libraries | |
it uses. In order to grow the community of contributors and users, and | |
allow the maintainers to devote more time to the projects, `please | |
donate today`_. | |
.. _please donate today: https://psfmember.org/civicrm/contribute/transact?reset=1&id=20 | |
Links | |
----- | |
* Website: https://www.palletsprojects.com/p/flask/ | |
* Documentation: http://flask.pocoo.org/docs/ | |
* License: `BSD <https://github.com/pallets/flask/blob/master/LICENSE>`_ | |
* Releases: https://pypi.org/project/Flask/ | |
* Code: https://github.com/pallets/flask | |
* Issue tracker: https://github.com/pallets/flask/issues | |
* Test status: | |
* Linux, Mac: https://travis-ci.org/pallets/flask | |
* Windows: https://ci.appveyor.com/project/pallets/flask | |
* Test coverage: https://codecov.io/gh/pallets/flask | |
.. _WSGI: https://wsgi.readthedocs.io | |
.. _Werkzeug: https://www.palletsprojects.com/p/werkzeug/ | |
.. _Jinja: https://www.palletsprojects.com/p/jinja/ | |
.. _pip: https://pip.pypa.io/en/stable/quickstart/ | |
Flask-1.0.2.dist-info/LICENSE.txt,sha256=ziEXA3AIuaiUn1qe4cd1XxCESWTYrk4TjN7Qb06J3l8,1575 | |
Flask-1.0.2.dist-info/METADATA,sha256=iA5tiNWzTtgCVe80aTZGNWsckj853fJyfvHs9U-WZRk,4182 | |
Flask-1.0.2.dist-info/RECORD,, | |
Flask-1.0.2.dist-info/WHEEL,sha256=J3CsTk7Mf2JNUyhImI-mjX-fmI4oDjyiXgWT4qgZiCE,110 | |
Flask-1.0.2.dist-info/entry_points.txt,sha256=gBLA1aKg0OYR8AhbAfg8lnburHtKcgJLDU52BBctN0k,42 | |
Flask-1.0.2.dist-info/top_level.txt,sha256=dvi65F6AeGWVU0TBpYiC04yM60-FX1gJFkK31IKQr5c,6 | |
flask/__init__.py,sha256=qq8lK6QQbxJALf1igz7qsvUwOTAoKvFGfdLm7jPNsso,1673 | |
flask/__main__.py,sha256=pgIXrHhxM5MAMvgzAqWpw_t6AXZ1zG38us4JRgJKtxk,291 | |
flask/_compat.py,sha256=UDFGhosh6mOdNB-4evKPuneHum1OpcAlwTNJCRm0irQ,2892 | |
flask/app.py,sha256=ahpe3T8w98rQd_Er5d7uDxK57S1nnqGQx3V3hirBovU,94147 | |
flask/blueprints.py,sha256=Cyhl_x99tgwqEZPtNDJUFneAfVJxWfEU4bQA7zWS6VU,18331 | |
flask/cli.py,sha256=30QYAO10Do9LbZYCLgfI_xhKjASdLopL8wKKVUGS2oA,29442 | |
flask/config.py,sha256=kznUhj4DLYxsTF_4kfDG8GEHto1oZG_kqblyrLFtpqQ,9951 | |
flask/ctx.py,sha256=leFzS9fzmo0uaLCdxpHc5_iiJZ1H0X_Ig4yPCOvT--g,16224 | |
flask/debughelpers.py,sha256=1ceC-UyqZTd4KsJkf0OObHPsVt5R3T6vnmYhiWBjV-w,6479 | |
flask/globals.py,sha256=pGg72QW_-4xUfsI33I5L_y76c21AeqfSqXDcbd8wvXU,1649 | |
flask/helpers.py,sha256=YCl8D1plTO1evEYP4KIgaY3H8Izww5j4EdgRJ89oHTw,40106 | |
flask/logging.py,sha256=qV9h0vt7NIRkKM9OHDWndzO61E5CeBMlqPJyTt-W2Wc,2231 | |
flask/sessions.py,sha256=2XHV4ASREhSEZ8bsPQW6pNVNuFtbR-04BzfKg0AfvHo,14452 | |
flask/signals.py,sha256=BGQbVyCYXnzKK2DVCzppKFyWN1qmrtW1QMAYUs-1Nr8,2211 | |
flask/templating.py,sha256=FDfWMbpgpC3qObW8GGXRAVrkHFF8K4CHOJymB1wvULI,4914 | |
flask/testing.py,sha256=XD3gWNvLUV8dqVHwKd9tZzsj81fSHtjOphQ1wTNtlMs,9379 | |
flask/views.py,sha256=Wy-_WkUVtCfE2zCXYeJehNgHuEtviE4v3HYfJ--MpbY,5733 | |
flask/wrappers.py,sha256=1Z9hF5-hXQajn_58XITQFRY8efv3Vy3uZ0avBfZu6XI,7511 | |
flask/json/__init__.py,sha256=Ns1Hj805XIxuBMh2z0dYnMVfb_KUgLzDmP3WoUYaPhw,10729 | |
flask/json/tag.py,sha256=9ehzrmt5k7hxf7ZEK0NOs3swvQyU9fWNe-pnYe69N60,8223 | |
../../Scripts/flask.exe,sha256=5bX4p1Qnv8eeVA4cSdnRqIl97bk_9winXYLaKPLzJKg,102777 | |
Flask-1.0.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 | |
flask/json/__pycache__/tag.cpython-37.pyc,, | |
flask/json/__pycache__/__init__.cpython-37.pyc,, | |
flask/__pycache__/app.cpython-37.pyc,, | |
flask/__pycache__/blueprints.cpython-37.pyc,, | |
flask/__pycache__/cli.cpython-37.pyc,, | |
flask/__pycache__/config.cpython-37.pyc,, | |
flask/__pycache__/ctx.cpython-37.pyc,, | |
flask/__pycache__/debughelpers.cpython-37.pyc,, | |
flask/__pycache__/globals.cpython-37.pyc,, | |
flask/__pycache__/helpers.cpython-37.pyc,, | |
flask/__pycache__/logging.cpython-37.pyc,, | |
flask/__pycache__/sessions.cpython-37.pyc,, | |
flask/__pycache__/signals.cpython-37.pyc,, | |
flask/__pycache__/templating.cpython-37.pyc,, | |
flask/__pycache__/testing.cpython-37.pyc,, | |
flask/__pycache__/views.cpython-37.pyc,, | |
flask/__pycache__/wrappers.cpython-37.pyc,, | |
flask/__pycache__/_compat.cpython-37.pyc,, | |
flask/__pycache__/__init__.cpython-37.pyc,, | |
flask/__pycache__/__main__.cpython-37.pyc,, |
flask |
Wheel-Version: 1.0 | |
Generator: bdist_wheel (0.31.0) | |
Root-Is-Purelib: true | |
Tag: py2-none-any | |
Tag: py3-none-any | |
# -*- coding: utf-8 -*- | |
""" | |
flask | |
~~~~~ | |
A microframework based on Werkzeug. It's extensively documented | |
and follows best practice patterns. | |
:copyright: © 2010 by the Pallets team. | |
:license: BSD, see LICENSE for more details. | |
""" | |
# Version string of the flask distribution.
__version__ = '1.0.2'
# utilities we import from Werkzeug and Jinja2 that are unused | |
# in the module but are exported as public interface. | |
from werkzeug.exceptions import abort | |
from werkzeug.utils import redirect | |
from jinja2 import Markup, escape | |
from .app import Flask, Request, Response | |
from .config import Config | |
from .helpers import url_for, flash, send_file, send_from_directory, \ | |
get_flashed_messages, get_template_attribute, make_response, safe_join, \ | |
stream_with_context | |
from .globals import current_app, g, request, session, _request_ctx_stack, \ | |
_app_ctx_stack | |
from .ctx import has_request_context, has_app_context, \ | |
after_this_request, copy_current_request_context | |
from .blueprints import Blueprint | |
from .templating import render_template, render_template_string | |
# the signals | |
from .signals import signals_available, template_rendered, request_started, \ | |
request_finished, got_request_exception, request_tearing_down, \ | |
appcontext_tearing_down, appcontext_pushed, \ | |
appcontext_popped, message_flashed, before_render_template | |
# We're not exposing the actual json module but a convenient wrapper around | |
# it. | |
from . import json | |
# This was the only thing that Flask used to export at one point and it had | |
# a more generic name. | |
jsonify = json.jsonify | |
# backwards compat, goes away in 1.0 | |
from .sessions import SecureCookieSession as Session | |
json_available = True |
# -*- coding: utf-8 -*- | |
""" | |
flask.__main__ | |
~~~~~~~~~~~~~~ | |
Alias for flask.run for the command line. | |
:copyright: © 2010 by the Pallets team. | |
:license: BSD, see LICENSE for more details. | |
""" | |
# Entry point for ``python -m flask``: delegate to the Flask CLI.
if __name__ == '__main__':
    from .cli import main
    # as_module=True makes the CLI report its own invocation as
    # ``python -m flask`` in usage/help output.
    main(as_module=True)
# -*- coding: utf-8 -*- | |
""" | |
flask._compat | |
~~~~~~~~~~~~~ | |
Some py2/py3 compatibility support based on a stripped down | |
version of six so we don't have to depend on a specific version | |
of it. | |
:copyright: © 2010 by the Pallets team. | |
:license: BSD, see LICENSE for more details. | |
""" | |
import sys | |
# True when running under a Python 2 interpreter.
PY2 = sys.version_info[0] == 2

# no-op passthrough, used where Python 3 needs no wrapper (see
# implements_to_string below)
_identity = lambda x: x

if not PY2:
    # Python 3: native ``str`` is text; the iter* helpers wrap dict views.
    text_type = str
    string_types = (str,)
    integer_types = (int,)

    iterkeys = lambda d: iter(d.keys())
    itervalues = lambda d: iter(d.values())
    iteritems = lambda d: iter(d.items())

    # ``getargspec`` is deprecated/removed on Python 3; expose
    # ``getfullargspec`` under the old name so callers use one spelling.
    from inspect import getfullargspec as getargspec
    from io import StringIO

    def reraise(tp, value, tb=None):
        # Re-raise ``value`` with traceback ``tb`` (Python 3 syntax).
        if value.__traceback__ is not tb:
            raise value.with_traceback(tb)
        raise value

    implements_to_string = _identity
else:
    # Python 2: text is ``unicode`` and integers may also be ``long``.
    text_type = unicode
    string_types = (str, unicode)
    integer_types = (int, long)

    iterkeys = lambda d: d.iterkeys()
    itervalues = lambda d: d.itervalues()
    iteritems = lambda d: d.iteritems()

    from inspect import getargspec
    from cStringIO import StringIO

    # The three-argument ``raise`` statement is a syntax error on
    # Python 3, so it must be hidden inside exec().
    exec('def reraise(tp, value, tb=None):\n raise tp, value, tb')

    def implements_to_string(cls):
        # Class decorator: derive a bytes __str__ from __unicode__.
        cls.__unicode__ = cls.__str__
        cls.__str__ = lambda x: x.__unicode__().encode('utf-8')
        return cls
def with_metaclass(meta, *bases):
    """Create a base class with a metaclass.

    Returns a throwaway base class whose sole job is to intercept the
    first (and only) subclass creation and re-run it through ``meta``
    with the real ``bases`` — a py2/py3-portable alternative to the
    ``metaclass=`` keyword.
    """
    class _TemporaryMeta(type):
        def __new__(mcs, name, this_bases, namespace):
            # Discard the dummy base and build the class via the real
            # metaclass with the originally requested bases.
            return meta(name, bases, namespace)
    return type.__new__(_TemporaryMeta, 'temporary_class', (), {})
# Certain versions of pypy have a bug where clearing the exception stack
# breaks the __exit__ function in a very peculiar way. The second level of
# exception blocks is necessary because pypy seems to forget to check if an
# exception happened until the next bytecode instruction?
#
# Relevant PyPy bugfix commit:
# https://bitbucket.org/pypy/pypy/commits/77ecf91c635a287e88e60d8ddb0f4e9df4003301
# According to ronan on #pypy IRC, it is released in PyPy2 2.3 and later
# versions.
#
# Ubuntu 14.04 has PyPy 2.2.1, which does exhibit this bug.
BROKEN_PYPY_CTXMGR_EXIT = False
if hasattr(sys, 'pypy_version_info'):
    # Probe the running interpreter: deliberately trigger the buggy code
    # path and observe whether __exit__ blows up with a TypeError.
    class _Mgr(object):
        def __enter__(self):
            return self

        def __exit__(self, *args):
            if hasattr(sys, 'exc_clear'):
                # Python 3 (PyPy3) doesn't have exc_clear
                sys.exc_clear()
    try:
        try:
            with _Mgr():
                raise AssertionError()
        # bare except + re-raise forces PyPy to notice the pending
        # exception before leaving the inner block
        except:
            raise
    except TypeError:
        # __exit__ misbehaved: remember that this interpreter is broken
        BROKEN_PYPY_CTXMGR_EXIT = True
    except AssertionError:
        # normal path: our own sentinel exception surfaced unchanged
        pass
# -*- coding: utf-8 -*- | |
""" | |
flask.app | |
~~~~~~~~~ | |
This module implements the central WSGI application object. | |
:copyright: © 2010 by the Pallets team. | |
:license: BSD, see LICENSE for more details. | |
""" | |
import os | |
import sys | |
import warnings | |
from datetime import timedelta | |
from functools import update_wrapper | |
from itertools import chain | |
from threading import Lock | |
from werkzeug.datastructures import Headers, ImmutableDict | |
from werkzeug.exceptions import BadRequest, BadRequestKeyError, HTTPException, \ | |
InternalServerError, MethodNotAllowed, default_exceptions | |
from werkzeug.routing import BuildError, Map, RequestRedirect, Rule | |
from . import cli, json | |
from ._compat import integer_types, reraise, string_types, text_type | |
from .config import Config, ConfigAttribute | |
from .ctx import AppContext, RequestContext, _AppCtxGlobals | |
from .globals import _request_ctx_stack, g, request, session | |
from .helpers import ( | |
_PackageBoundObject, | |
_endpoint_from_view_func, find_package, get_env, get_debug_flag, | |
get_flashed_messages, locked_cached_property, url_for, get_load_dotenv | |
) | |
from .logging import create_logger | |
from .sessions import SecureCookieSessionInterface | |
from .signals import appcontext_tearing_down, got_request_exception, \ | |
request_finished, request_started, request_tearing_down | |
from .templating import DispatchingJinjaLoader, Environment, \ | |
_default_template_ctx_processor | |
from .wrappers import Request, Response | |
# a singleton sentinel value for parameter defaults | |
_sentinel = object() | |
def _make_timedelta(value): | |
if not isinstance(value, timedelta): | |
return timedelta(seconds=value) | |
return value | |
def setupmethod(f):
    """Decorator for app setup methods.

    The returned wrapper calls ``f`` unchanged, except that in debug mode
    it raises an :exc:`AssertionError` when setup is attempted after the
    first request has already been handled — a symptom of views or other
    registrations being imported too late.
    """
    def wrapper_func(self, *args, **kwargs):
        setup_too_late = self.debug and self._got_first_request
        if not setup_too_late:
            return f(self, *args, **kwargs)
        raise AssertionError('A setup function was called after the '
            'first request was handled. This usually indicates a bug '
            'in the application where a module was not imported '
            'and decorators or other functionality was called too late.\n'
            'To fix this make sure to import all your view modules, '
            'database models and everything related at a central place '
            'before the application starts serving requests.')
    # keep the decorated function's name/docs for introspection
    return update_wrapper(wrapper_func, f)
class Flask(_PackageBoundObject):
    """The flask object implements a WSGI application and acts as the central
    object. It is passed the name of the module or package of the
    application. Once it is created it will act as a central registry for
    the view functions, the URL rules, template configuration and much more.

    The name of the package is used to resolve resources from inside the
    package or the folder the module is contained in depending on if the
    package parameter resolves to an actual python package (a folder with
    an :file:`__init__.py` file inside) or a standard module (just a ``.py`` file).

    For more information about resource loading, see :func:`open_resource`.

    Usually you create a :class:`Flask` instance in your main module or
    in the :file:`__init__.py` file of your package like this::

        from flask import Flask
        app = Flask(__name__)

    .. admonition:: About the First Parameter

        The idea of the first parameter is to give Flask an idea of what
        belongs to your application. This name is used to find resources
        on the filesystem, can be used by extensions to improve debugging
        information and a lot more.

        So it's important what you provide there. If you are using a single
        module, `__name__` is always the correct value. If you however are
        using a package, it's usually recommended to hardcode the name of
        your package there.

        For example if your application is defined in :file:`yourapplication/app.py`
        you should create it with one of the two versions below::

            app = Flask('yourapplication')
            app = Flask(__name__.split('.')[0])

        Why is that? The application will work even with `__name__`, thanks
        to how resources are looked up. However it will make debugging more
        painful. Certain extensions can make assumptions based on the
        import name of your application. For example the Flask-SQLAlchemy
        extension will look for the code in your application that triggered
        an SQL query in debug mode. If the import name is not properly set
        up, that debugging information is lost. (For example it would only
        pick up SQL queries in `yourapplication.app` and not
        `yourapplication.views.frontend`)

    .. versionadded:: 0.7
        The `static_url_path`, `static_folder`, and `template_folder`
        parameters were added.

    .. versionadded:: 0.8
        The `instance_path` and `instance_relative_config` parameters were
        added.

    .. versionadded:: 0.11
        The `root_path` parameter was added.

    .. versionadded:: 1.0
        The ``host_matching`` and ``static_host`` parameters were added.

    .. versionadded:: 1.0
        The ``subdomain_matching`` parameter was added. Subdomain
        matching needs to be enabled manually now. Setting
        :data:`SERVER_NAME` does not implicitly enable it.

    :param import_name: the name of the application package
    :param static_url_path: can be used to specify a different path for the
                            static files on the web. Defaults to the name
                            of the `static_folder` folder.
    :param static_folder: the folder with static files that should be served
                          at `static_url_path`. Defaults to the ``'static'``
                          folder in the root path of the application.
    :param static_host: the host to use when adding the static route.
        Defaults to None. Required when using ``host_matching=True``
        with a ``static_folder`` configured.
    :param host_matching: set ``url_map.host_matching`` attribute.
        Defaults to False.
    :param subdomain_matching: consider the subdomain relative to
        :data:`SERVER_NAME` when matching routes. Defaults to False.
    :param template_folder: the folder that contains the templates that should
                            be used by the application. Defaults to
                            ``'templates'`` folder in the root path of the
                            application.
    :param instance_path: An alternative instance path for the application.
                          By default the folder ``'instance'`` next to the
                          package or module is assumed to be the instance
                          path.
    :param instance_relative_config: if set to ``True`` relative filenames
                                     for loading the config are assumed to
                                     be relative to the instance path instead
                                     of the application root.
    :param root_path: Flask by default will automatically calculate the path
                      to the root of the application. In certain situations
                      this cannot be achieved (for instance if the package
                      is a Python 3 namespace package) and needs to be
                      manually defined.
    """
    #: The class that is used for request objects. See :class:`~flask.Request`
    #: for more information.
    request_class = Request

    #: The class that is used for response objects. See
    #: :class:`~flask.Response` for more information.
    response_class = Response

    #: The class that is used for the Jinja environment.
    #:
    #: .. versionadded:: 0.11
    jinja_environment = Environment

    #: The class that is used for the :data:`~flask.g` instance.
    #:
    #: Example use cases for a custom class:
    #:
    #: 1. Store arbitrary attributes on flask.g.
    #: 2. Add a property for lazy per-request database connectors.
    #: 3. Return None instead of AttributeError on unexpected attributes.
    #: 4. Raise exception if an unexpected attr is set, a "controlled" flask.g.
    #:
    #: In Flask 0.9 this property was called `request_globals_class` but it
    #: was changed in 0.10 to :attr:`app_ctx_globals_class` because the
    #: flask.g object is now application context scoped.
    #:
    #: .. versionadded:: 0.10
    app_ctx_globals_class = _AppCtxGlobals

    #: The class that is used for the ``config`` attribute of this app.
    #: Defaults to :class:`~flask.Config`.
    #:
    #: Example use cases for a custom class:
    #:
    #: 1. Default values for certain config options.
    #: 2. Access to config values through attributes in addition to keys.
    #:
    #: .. versionadded:: 0.11
    config_class = Config

    #: The testing flag. Set this to ``True`` to enable the test mode of
    #: Flask extensions (and in the future probably also Flask itself).
    #: For example this might activate test helpers that have an
    #: additional runtime cost which should not be enabled by default.
    #:
    #: If this is enabled and PROPAGATE_EXCEPTIONS is not changed from the
    #: default it's implicitly enabled.
    #:
    #: This attribute can also be configured from the config with the
    #: ``TESTING`` configuration key. Defaults to ``False``.
    testing = ConfigAttribute('TESTING')

    #: If a secret key is set, cryptographic components can use this to
    #: sign cookies and other things. Set this to a complex random value
    #: when you want to use the secure cookie for instance.
    #:
    #: This attribute can also be configured from the config with the
    #: :data:`SECRET_KEY` configuration key. Defaults to ``None``.
    secret_key = ConfigAttribute('SECRET_KEY')

    #: The secure cookie uses this for the name of the session cookie.
    #:
    #: This attribute can also be configured from the config with the
    #: ``SESSION_COOKIE_NAME`` configuration key. Defaults to ``'session'``
    session_cookie_name = ConfigAttribute('SESSION_COOKIE_NAME')

    #: A :class:`~datetime.timedelta` which is used to set the expiration
    #: date of a permanent session. The default is 31 days which makes a
    #: permanent session survive for roughly one month.
    #:
    #: This attribute can also be configured from the config with the
    #: ``PERMANENT_SESSION_LIFETIME`` configuration key. Defaults to
    #: ``timedelta(days=31)``
    permanent_session_lifetime = ConfigAttribute('PERMANENT_SESSION_LIFETIME',
        get_converter=_make_timedelta)

    #: A :class:`~datetime.timedelta` which is used as default cache_timeout
    #: for the :func:`send_file` functions. The default is 12 hours.
    #:
    #: This attribute can also be configured from the config with the
    #: ``SEND_FILE_MAX_AGE_DEFAULT`` configuration key. This configuration
    #: variable can also be set with an integer value used as seconds.
    #: Defaults to ``timedelta(hours=12)``
    send_file_max_age_default = ConfigAttribute('SEND_FILE_MAX_AGE_DEFAULT',
        get_converter=_make_timedelta)

    #: Enable this if you want to use the X-Sendfile feature. Keep in
    #: mind that the server has to support this. This only affects files
    #: sent with the :func:`send_file` method.
    #:
    #: .. versionadded:: 0.2
    #:
    #: This attribute can also be configured from the config with the
    #: ``USE_X_SENDFILE`` configuration key. Defaults to ``False``.
    use_x_sendfile = ConfigAttribute('USE_X_SENDFILE')

    #: The JSON encoder class to use. Defaults to :class:`~flask.json.JSONEncoder`.
    #:
    #: .. versionadded:: 0.10
    json_encoder = json.JSONEncoder

    #: The JSON decoder class to use. Defaults to :class:`~flask.json.JSONDecoder`.
    #:
    #: .. versionadded:: 0.10
    json_decoder = json.JSONDecoder

    #: Options that are passed directly to the Jinja2 environment.
    jinja_options = ImmutableDict(
        extensions=['jinja2.ext.autoescape', 'jinja2.ext.with_']
    )

    #: Default configuration parameters.
    default_config = ImmutableDict({
        'ENV': None,
        'DEBUG': None,
        'TESTING': False,
        'PROPAGATE_EXCEPTIONS': None,
        'PRESERVE_CONTEXT_ON_EXCEPTION': None,
        'SECRET_KEY': None,
        'PERMANENT_SESSION_LIFETIME': timedelta(days=31),
        'USE_X_SENDFILE': False,
        'SERVER_NAME': None,
        'APPLICATION_ROOT': '/',
        'SESSION_COOKIE_NAME': 'session',
        'SESSION_COOKIE_DOMAIN': None,
        'SESSION_COOKIE_PATH': None,
        'SESSION_COOKIE_HTTPONLY': True,
        'SESSION_COOKIE_SECURE': False,
        'SESSION_COOKIE_SAMESITE': None,
        'SESSION_REFRESH_EACH_REQUEST': True,
        'MAX_CONTENT_LENGTH': None,
        'SEND_FILE_MAX_AGE_DEFAULT': timedelta(hours=12),
        'TRAP_BAD_REQUEST_ERRORS': None,
        'TRAP_HTTP_EXCEPTIONS': False,
        'EXPLAIN_TEMPLATE_LOADING': False,
        'PREFERRED_URL_SCHEME': 'http',
        'JSON_AS_ASCII': True,
        'JSON_SORT_KEYS': True,
        'JSONIFY_PRETTYPRINT_REGULAR': False,
        'JSONIFY_MIMETYPE': 'application/json',
        'TEMPLATES_AUTO_RELOAD': None,
        'MAX_COOKIE_SIZE': 4093,
    })

    #: The rule object to use for URL rules created. This is used by
    #: :meth:`add_url_rule`. Defaults to :class:`werkzeug.routing.Rule`.
    #:
    #: .. versionadded:: 0.7
    url_rule_class = Rule

    #: The test client class that is used when :meth:`test_client` is
    #: invoked. ``None`` means the default :class:`~flask.testing.FlaskClient`.
    #:
    #: .. versionadded:: 0.7
    test_client_class = None

    #: The :class:`~click.testing.CliRunner` subclass, by default
    #: :class:`~flask.testing.FlaskCliRunner` that is used by
    #: :meth:`test_cli_runner`. Its ``__init__`` method should take a
    #: Flask app object as the first argument.
    #:
    #: .. versionadded:: 1.0
    test_cli_runner_class = None

    #: the session interface to use. By default an instance of
    #: :class:`~flask.sessions.SecureCookieSessionInterface` is used here.
    #:
    #: .. versionadded:: 0.8
    session_interface = SecureCookieSessionInterface()

    # TODO remove the next three attrs when Sphinx :inherited-members: works
    # https://github.com/sphinx-doc/sphinx/issues/741

    #: The name of the package or module that this app belongs to. Do not
    #: change this once it is set by the constructor.
    import_name = None

    #: Location of the template files to be added to the template lookup.
    #: ``None`` if templates should not be added.
    template_folder = None

    #: Absolute path to the package on the filesystem. Used to look up
    #: resources contained in the package.
    root_path = None
    def __init__(
        self,
        import_name,
        static_url_path=None,
        static_folder='static',
        static_host=None,
        host_matching=False,
        subdomain_matching=False,
        template_folder='templates',
        instance_path=None,
        instance_relative_config=False,
        root_path=None
    ):
        """Initialize the application registries and routing state.

        All parameters are documented on the class docstring. Raises
        :exc:`ValueError` if ``instance_path`` is given but not absolute.
        """
        _PackageBoundObject.__init__(
            self,
            import_name,
            template_folder=template_folder,
            root_path=root_path
        )

        # only override the _PackageBoundObject defaults when explicitly set
        if static_url_path is not None:
            self.static_url_path = static_url_path

        if static_folder is not None:
            self.static_folder = static_folder

        if instance_path is None:
            instance_path = self.auto_find_instance_path()
        elif not os.path.isabs(instance_path):
            raise ValueError(
                'If an instance path is provided it must be absolute.'
                ' A relative path was given instead.'
            )

        #: Holds the path to the instance folder.
        #:
        #: .. versionadded:: 0.8
        self.instance_path = instance_path

        #: The configuration dictionary as :class:`Config`. This behaves
        #: exactly like a regular dictionary but supports additional methods
        #: to load a config from files.
        self.config = self.make_config(instance_relative_config)

        #: A dictionary of all view functions registered. The keys will
        #: be function names which are also used to generate URLs and
        #: the values are the function objects themselves.
        #: To register a view function, use the :meth:`route` decorator.
        self.view_functions = {}

        #: A dictionary of all registered error handlers. The key is ``None``
        #: for error handlers active on the application, otherwise the key is
        #: the name of the blueprint. Each key points to another dictionary
        #: where the key is the status code of the http exception. The
        #: special key ``None`` points to a list of tuples where the first item
        #: is the class for the instance check and the second the error handler
        #: function.
        #:
        #: To register an error handler, use the :meth:`errorhandler`
        #: decorator.
        self.error_handler_spec = {}

        #: A list of functions that are called when :meth:`url_for` raises a
        #: :exc:`~werkzeug.routing.BuildError`. Each function registered here
        #: is called with `error`, `endpoint` and `values`. If a function
        #: returns ``None`` or raises a :exc:`BuildError` the next function is
        #: tried.
        #:
        #: .. versionadded:: 0.9
        self.url_build_error_handlers = []

        #: A dictionary with lists of functions that will be called at the
        #: beginning of each request. The key of the dictionary is the name of
        #: the blueprint this function is active for, or ``None`` for all
        #: requests. To register a function, use the :meth:`before_request`
        #: decorator.
        self.before_request_funcs = {}

        #: A list of functions that will be called at the beginning of the
        #: first request to this instance. To register a function, use the
        #: :meth:`before_first_request` decorator.
        #:
        #: .. versionadded:: 0.8
        self.before_first_request_funcs = []

        #: A dictionary with lists of functions that should be called after
        #: each request. The key of the dictionary is the name of the blueprint
        #: this function is active for, ``None`` for all requests. This can for
        #: example be used to close database connections. To register a function
        #: here, use the :meth:`after_request` decorator.
        self.after_request_funcs = {}

        #: A dictionary with lists of functions that are called after
        #: each request, even if an exception has occurred. The key of the
        #: dictionary is the name of the blueprint this function is active for,
        #: ``None`` for all requests. These functions are not allowed to modify
        #: the request, and their return values are ignored. If an exception
        #: occurred while processing the request, it gets passed to each
        #: teardown_request function. To register a function here, use the
        #: :meth:`teardown_request` decorator.
        #:
        #: .. versionadded:: 0.7
        self.teardown_request_funcs = {}

        #: A list of functions that are called when the application context
        #: is destroyed. Since the application context is also torn down
        #: if the request ends this is the place to store code that disconnects
        #: from databases.
        #:
        #: .. versionadded:: 0.9
        self.teardown_appcontext_funcs = []

        #: A dictionary with lists of functions that are called before the
        #: :attr:`before_request_funcs` functions. The key of the dictionary is
        #: the name of the blueprint this function is active for, or ``None``
        #: for all requests. To register a function, use
        #: :meth:`url_value_preprocessor`.
        #:
        #: .. versionadded:: 0.7
        self.url_value_preprocessors = {}

        #: A dictionary with lists of functions that can be used as URL value
        #: preprocessors. The key ``None`` here is used for application wide
        #: callbacks, otherwise the key is the name of the blueprint.
        #: Each of these functions has the chance to modify the dictionary
        #: of URL values before they are used as the keyword arguments of the
        #: view function. For each function registered this one should also
        #: provide a :meth:`url_defaults` function that adds the parameters
        #: automatically again that were removed that way.
        #:
        #: .. versionadded:: 0.7
        self.url_default_functions = {}

        #: A dictionary with list of functions that are called without argument
        #: to populate the template context. The key of the dictionary is the
        #: name of the blueprint this function is active for, ``None`` for all
        #: requests. Each returns a dictionary that the template context is
        #: updated with. To register a function here, use the
        #: :meth:`context_processor` decorator.
        self.template_context_processors = {
            None: [_default_template_ctx_processor]
        }

        #: A list of shell context processor functions that should be run
        #: when a shell context is created.
        #:
        #: .. versionadded:: 0.11
        self.shell_context_processors = []

        #: all the attached blueprints in a dictionary by name. Blueprints
        #: can be attached multiple times so this dictionary does not tell
        #: you how often they got attached.
        #:
        #: .. versionadded:: 0.7
        self.blueprints = {}
        # registration order, used to apply blueprints deterministically
        self._blueprint_order = []

        #: a place where extensions can store application specific state. For
        #: example this is where an extension could store database engines and
        #: similar things. For backwards compatibility extensions should register
        #: themselves like this::
        #:
        #:      if not hasattr(app, 'extensions'):
        #:          app.extensions = {}
        #:      app.extensions['extensionname'] = SomeObject()
        #:
        #: The key must match the name of the extension module. For example in
        #: case of a "Flask-Foo" extension in `flask_foo`, the key would be
        #: ``'foo'``.
        #:
        #: .. versionadded:: 0.7
        self.extensions = {}

        #: The :class:`~werkzeug.routing.Map` for this instance. You can use
        #: this to change the routing converters after the class was created
        #: but before any routes are connected. Example::
        #:
        #:    from werkzeug.routing import BaseConverter
        #:
        #:    class ListConverter(BaseConverter):
        #:        def to_python(self, value):
        #:            return value.split(',')
        #:        def to_url(self, values):
        #:            return ','.join(super(ListConverter, self).to_url(value)
        #:                            for value in values)
        #:
        #:    app = Flask(__name__)
        #:    app.url_map.converters['list'] = ListConverter
        self.url_map = Map()

        self.url_map.host_matching = host_matching
        self.subdomain_matching = subdomain_matching

        # tracks internally if the application already handled at least one
        # request.
        self._got_first_request = False
        self._before_request_lock = Lock()

        # Add a static route using the provided static_url_path, static_host,
        # and static_folder if there is a configured static_folder.
        # Note we do this without checking if static_folder exists.
        # For one, it might be created while the server is running (e.g. during
        # development). Also, Google App Engine stores static files somewhere
        if self.has_static_folder:
            assert bool(static_host) == host_matching, 'Invalid static_host/host_matching combination'
            self.add_url_rule(
                self.static_url_path + '/<path:filename>',
                endpoint='static',
                host=static_host,
                view_func=self.send_static_file
            )

        #: The click command line context for this application. Commands
        #: registered here show up in the :command:`flask` command once the
        #: application has been discovered. The default commands are
        #: provided by Flask itself and can be overridden.
        #:
        #: This is an instance of a :class:`click.Group` object.
        self.cli = cli.AppGroup(self.name)
@locked_cached_property
def name(self):
    """The display name of the application.

    Usually the import name; when the app was created in a ``__main__``
    module it is guessed from that module's file name instead. Can be set
    and overridden to change the value.

    .. versionadded:: 0.8
    """
    if self.import_name != '__main__':
        return self.import_name
    main_file = getattr(sys.modules['__main__'], '__file__', None)
    if main_file is None:
        # interactive session: no file to derive a name from
        return '__main__'
    base = os.path.basename(main_file)
    return os.path.splitext(base)[0]
@property
def propagate_exceptions(self):
    """Whether exceptions bubble up to the WSGI server instead of being
    handled by the app. Honors ``PROPAGATE_EXCEPTIONS`` when set,
    otherwise defaults to ``True`` in testing or debug mode.

    .. versionadded:: 0.7
    """
    configured = self.config['PROPAGATE_EXCEPTIONS']
    if configured is None:
        return self.testing or self.debug
    return configured
@property
def preserve_context_on_exception(self):
    """Whether the request context is kept around when an exception
    happens. Honors ``PRESERVE_CONTEXT_ON_EXCEPTION`` when set,
    otherwise defaults to the debug flag.

    .. versionadded:: 0.7
    """
    configured = self.config['PRESERVE_CONTEXT_ON_EXCEPTION']
    if configured is None:
        return self.debug
    return configured
@locked_cached_property
def logger(self):
    """The ``'flask.app'`` logger, a standard Python
    :class:`~logging.Logger`, created lazily and cached.

    In debug mode, the logger's :attr:`~logging.Logger.level` will be set
    to :data:`~logging.DEBUG`, and a default handler is added only if no
    handlers are configured. See :ref:`logging` for more information.

    .. versionchanged:: 1.0
        Behavior was simplified. The logger is always named ``flask.app``,
        the level is only set during configuration, a single format is
        used, and no handlers are ever removed.

    .. versionadded:: 0.3
    """
    return create_logger(self)
@locked_cached_property
def jinja_env(self):
    """The Jinja2 environment used to load templates, built on first
    access via :meth:`create_jinja_environment` and then cached."""
    return self.create_jinja_environment()
@property
def got_first_request(self):
    """``True`` once the application has started handling its first
    request.

    .. versionadded:: 0.8
    """
    return self._got_first_request
def make_config(self, instance_relative=False):
    """Create the :attr:`config` object; called by the Flask constructor.

    :param instance_relative: when ``True`` (passed in from the
        constructor as `instance_relative_config`) config file paths are
        resolved against the instance path rather than the root path.

    .. versionadded:: 0.8
    """
    base_path = self.instance_path if instance_relative else self.root_path
    defaults = dict(self.default_config)
    # ENV and DEBUG are sourced from the environment so they are already
    # correct before any config file is loaded.
    defaults['ENV'] = get_env()
    defaults['DEBUG'] = get_debug_flag()
    return self.config_class(base_path, defaults)
def auto_find_instance_path(self):
    """Locate the instance folder when none was provided to the
    constructor: the ``instance`` directory next to the main file or
    package, or ``var/<name>-instance`` under the installation prefix
    for installed packages.

    .. versionadded:: 0.8
    """
    prefix, package_path = find_package(self.import_name)
    if prefix is not None:
        # installed package: keep instance data under the install prefix
        return os.path.join(prefix, 'var', self.name + '-instance')
    return os.path.join(package_path, 'instance')
def open_instance_resource(self, resource, mode='rb'):
    """Open a resource file relative to :attr:`instance_path`.

    Otherwise works like :meth:`open_resource`; unlike package
    resources, instance resources can also be opened for writing.

    :param resource: the name of the resource. To access resources within
                     subfolders use forward slashes as separator.
    :param mode: resource file opening mode, default is 'rb'.
    """
    path = os.path.join(self.instance_path, resource)
    return open(path, mode)
def _get_templates_auto_reload(self):
    """Reload templates when they are changed. Used by
    :meth:`create_jinja_environment`.

    This attribute can be configured with :data:`TEMPLATES_AUTO_RELOAD`.
    If not set, it will be enabled in debug mode.

    .. versionadded:: 1.0
        This property was added but the underlying config and behavior
        already existed.
    """
    configured = self.config['TEMPLATES_AUTO_RELOAD']
    if configured is None:
        # fall back to auto-reloading whenever debug mode is on
        return self.debug
    return configured

def _set_templates_auto_reload(self, value):
    # persist in the config so the setting survives the del below
    self.config['TEMPLATES_AUTO_RELOAD'] = value

templates_auto_reload = property(
    _get_templates_auto_reload, _set_templates_auto_reload
)
# the raw accessors are implementation details; only expose the property
del _get_templates_auto_reload, _set_templates_auto_reload
    def create_jinja_environment(self):
        """Creates the Jinja2 environment based on :attr:`jinja_options`
        and :meth:`select_jinja_autoescape`. Since 0.7 this also adds
        the Jinja2 globals and filters after initialization. Override
        this function to customize the behavior.

        .. versionadded:: 0.5

        .. versionchanged:: 0.11
           ``Environment.auto_reload`` set in accordance with
           ``TEMPLATES_AUTO_RELOAD`` configuration option.
        """
        # Copy so the per-call adjustments below do not mutate the shared
        # class-level options dict.
        options = dict(self.jinja_options)
        if 'autoescape' not in options:
            options['autoescape'] = self.select_jinja_autoescape
        if 'auto_reload' not in options:
            options['auto_reload'] = self.templates_auto_reload
        rv = self.jinja_environment(self, **options)
        rv.globals.update(
            url_for=url_for,
            get_flashed_messages=get_flashed_messages,
            config=self.config,
            # request, session and g are normally added with the
            # context processor for efficiency reasons but for imported
            # templates we also want the proxies in there.
            request=request,
            session=session,
            g=g
        )
        rv.filters['tojson'] = json.tojson_filter
        return rv
def create_global_jinja_loader(self): | |
"""Creates the loader for the Jinja2 environment. Can be used to | |
override just the loader and keeping the rest unchanged. It's | |
discouraged to override this function. Instead one should override | |
the :meth:`jinja_loader` function instead. | |
The global loader dispatches between the loaders of the application | |
and the individual blueprints. | |
.. versionadded:: 0.7 | |
""" | |
return DispatchingJinjaLoader(self) | |
def select_jinja_autoescape(self, filename): | |
"""Returns ``True`` if autoescaping should be active for the given | |
template name. If no template name is given, returns `True`. | |
.. versionadded:: 0.5 | |
""" | |
if filename is None: | |
return True | |
return filename.endswith(('.html', '.htm', '.xml', '.xhtml')) | |
    def update_template_context(self, context):
        """Update the template context with some commonly used variables.
        This injects request, session, config and g into the template
        context as well as everything template context processors want
        to inject. Note that the as of Flask 0.6, the original values
        in the context will not be overridden if a context processor
        decides to return a value with the same key.

        :param context: the context as a dictionary that is updated in place
                        to add extra variables.
        """
        # Application-wide context processors always run.
        funcs = self.template_context_processors[None]
        reqctx = _request_ctx_stack.top
        if reqctx is not None:
            # Inside a request: also run the processors registered by the
            # blueprint handling the current request, if any.
            bp = reqctx.request.blueprint
            if bp is not None and bp in self.template_context_processors:
                funcs = chain(funcs, self.template_context_processors[bp])
        orig_ctx = context.copy()
        for func in funcs:
            context.update(func())
        # make sure the original values win.  This makes it possible to
        # easier add new variables in context processors without breaking
        # existing views.
        context.update(orig_ctx)
def make_shell_context(self): | |
"""Returns the shell context for an interactive shell for this | |
application. This runs all the registered shell context | |
processors. | |
.. versionadded:: 0.11 | |
""" | |
rv = {'app': self, 'g': g} | |
for processor in self.shell_context_processors: | |
rv.update(processor()) | |
return rv | |
    #: What environment the app is running in. Flask and extensions may
    #: enable behaviors based on the environment, such as enabling debug
    #: mode. This maps to the :data:`ENV` config key. This is set by the
    #: :envvar:`FLASK_ENV` environment variable and may not behave as
    #: expected if set in code.
    #:
    #: **Do not enable development when deploying in production.**
    #:
    #: Default: ``'production'``
    env = ConfigAttribute('ENV')

    def _get_debug(self):
        # Debug state is stored in the config so ``app.debug`` and
        # ``app.config['DEBUG']`` can never disagree.
        return self.config['DEBUG']

    def _set_debug(self, value):
        self.config['DEBUG'] = value
        # Keep the Jinja environment's template auto-reload behavior in
        # sync with the (possibly changed) debug flag.
        self.jinja_env.auto_reload = self.templates_auto_reload

    #: Whether debug mode is enabled. When using ``flask run`` to start
    #: the development server, an interactive debugger will be shown for
    #: unhandled exceptions, and the server will be reloaded when code
    #: changes. This maps to the :data:`DEBUG` config key. This is
    #: enabled when :attr:`env` is ``'development'`` and is overridden
    #: by the ``FLASK_DEBUG`` environment variable. It may not behave as
    #: expected if set in code.
    #:
    #: **Do not enable debug mode when deploying in production.**
    #:
    #: Default: ``True`` if :attr:`env` is ``'development'``, or
    #: ``False`` otherwise.
    debug = property(_get_debug, _set_debug)
    # Drop the raw accessors; only the ``debug`` property stays public.
    del _get_debug, _set_debug
    def run(self, host=None, port=None, debug=None,
            load_dotenv=True, **options):
        """Runs the application on a local development server.

        Do not use ``run()`` in a production setting. It is not intended to
        meet security and performance requirements for a production server.
        Instead, see :ref:`deployment` for WSGI server recommendations.

        If the :attr:`debug` flag is set the server will automatically reload
        for code changes and show a debugger in case an exception happened.

        If you want to run the application in debug mode, but disable the
        code execution on the interactive debugger, you can pass
        ``use_evalex=False`` as parameter.  This will keep the debugger's
        traceback screen active, but disable code execution.

        It is not recommended to use this function for development with
        automatic reloading as this is badly supported.  Instead you should
        be using the :command:`flask` command line script's ``run`` support.

        .. admonition:: Keep in Mind

           Flask will suppress any server error with a generic error page
           unless it is in debug mode.  As such to enable just the
           interactive debugger without the code reloading, you have to
           invoke :meth:`run` with ``debug=True`` and ``use_reloader=False``.
           Setting ``use_debugger`` to ``True`` without being in debug mode
           won't catch any exceptions because there won't be any to
           catch.

        :param host: the hostname to listen on. Set this to ``'0.0.0.0'`` to
            have the server available externally as well. Defaults to
            ``'127.0.0.1'`` or the host in the ``SERVER_NAME`` config variable
            if present.
        :param port: the port of the webserver. Defaults to ``5000`` or the
            port defined in the ``SERVER_NAME`` config variable if present.
        :param debug: if given, enable or disable debug mode. See
            :attr:`debug`.
        :param load_dotenv: Load the nearest :file:`.env` and :file:`.flaskenv`
            files to set environment variables. Will also change the working
            directory to the directory containing the first file found.
        :param options: the options to be forwarded to the underlying Werkzeug
            server. See :func:`werkzeug.serving.run_simple` for more
            information.

        .. versionchanged:: 1.0
            If installed, python-dotenv will be used to load environment
            variables from :file:`.env` and :file:`.flaskenv` files.

            If set, the :envvar:`FLASK_ENV` and :envvar:`FLASK_DEBUG`
            environment variables will override :attr:`env` and
            :attr:`debug`.

            Threaded mode is enabled by default.

        .. versionchanged:: 0.10
            The default port is now picked from the ``SERVER_NAME``
            variable.
        """
        # Change this into a no-op if the server is invoked from the
        # command line. Have a look at cli.py for more information.
        if os.environ.get('FLASK_RUN_FROM_CLI') == 'true':
            from .debughelpers import explain_ignored_app_run
            explain_ignored_app_run()
            return

        if get_load_dotenv(load_dotenv):
            cli.load_dotenv()

            # if set, let env vars override previous values
            if 'FLASK_ENV' in os.environ:
                self.env = get_env()
                self.debug = get_debug_flag()
            elif 'FLASK_DEBUG' in os.environ:
                self.debug = get_debug_flag()

        # debug passed to method overrides all other sources
        if debug is not None:
            self.debug = bool(debug)

        # Resolve host/port: explicit argument, then SERVER_NAME config,
        # then the hard-coded development defaults below.
        _host = '127.0.0.1'
        _port = 5000
        server_name = self.config.get('SERVER_NAME')
        sn_host, sn_port = None, None

        if server_name:
            sn_host, _, sn_port = server_name.partition(':')

        host = host or sn_host or _host
        port = int(port or sn_port or _port)

        # Reloader and debugger default to the debug flag; threading is
        # on by default since 1.0.
        options.setdefault('use_reloader', self.debug)
        options.setdefault('use_debugger', self.debug)
        options.setdefault('threaded', True)

        cli.show_server_banner(self.env, self.debug, self.name, False)

        from werkzeug.serving import run_simple

        try:
            run_simple(host, port, self, **options)
        finally:
            # reset the first request information if the development server
            # reset normally.  This makes it possible to restart the server
            # without reloader and that stuff from an interactive shell.
            self._got_first_request = False
    def test_client(self, use_cookies=True, **kwargs):
        """Creates a test client for this application.  For information
        about unit testing head over to :ref:`testing`.

        Note that if you are testing for assertions or exceptions in your
        application code, you must set ``app.testing = True`` in order for the
        exceptions to propagate to the test client.  Otherwise, the exception
        will be handled by the application (not visible to the test client) and
        the only indication of an AssertionError or other exception will be a
        500 status code response to the test client.  See the :attr:`testing`
        attribute.  For example::

            app.testing = True
            client = app.test_client()

        The test client can be used in a ``with`` block to defer the closing down
        of the context until the end of the ``with`` block.  This is useful if
        you want to access the context locals for testing::

            with app.test_client() as c:
                rv = c.get('/?vodka=42')
                assert request.args['vodka'] == '42'

        Additionally, you may pass optional keyword arguments that will then
        be passed to the application's :attr:`test_client_class` constructor.
        For example::

            from flask.testing import FlaskClient

            class CustomClient(FlaskClient):
                def __init__(self, *args, **kwargs):
                    self._authentication = kwargs.pop("authentication")
                    super(CustomClient, self).__init__(*args, **kwargs)

            app.test_client_class = CustomClient
            client = app.test_client(authentication='Basic ....')

        See :class:`~flask.testing.FlaskClient` for more information.

        .. versionchanged:: 0.4
           added support for ``with`` block usage for the client.

        .. versionadded:: 0.7
           The `use_cookies` parameter was added as well as the ability
           to override the client to be used by setting the
           :attr:`test_client_class` attribute.

        .. versionchanged:: 0.11
           Added `**kwargs` to support passing additional keyword arguments to
           the constructor of :attr:`test_client_class`.
        """
        cls = self.test_client_class
        if cls is None:
            # Imported lazily so the testing helpers are only pulled in
            # when a test client is actually requested.
            from flask.testing import FlaskClient as cls
        return cls(self, self.response_class, use_cookies=use_cookies, **kwargs)
def test_cli_runner(self, **kwargs): | |
"""Create a CLI runner for testing CLI commands. | |
See :ref:`testing-cli`. | |
Returns an instance of :attr:`test_cli_runner_class`, by default | |
:class:`~flask.testing.FlaskCliRunner`. The Flask app object is | |
passed as the first argument. | |
.. versionadded:: 1.0 | |
""" | |
cls = self.test_cli_runner_class | |
if cls is None: | |
from flask.testing import FlaskCliRunner as cls | |
return cls(self, **kwargs) | |
def open_session(self, request): | |
"""Creates or opens a new session. Default implementation stores all | |
session data in a signed cookie. This requires that the | |
:attr:`secret_key` is set. Instead of overriding this method | |
we recommend replacing the :class:`session_interface`. | |
.. deprecated: 1.0 | |
Will be removed in 1.1. Use ``session_interface.open_session`` | |
instead. | |
:param request: an instance of :attr:`request_class`. | |
""" | |
warnings.warn(DeprecationWarning( | |
'"open_session" is deprecated and will be removed in 1.1. Use' | |
' "session_interface.open_session" instead.' | |
)) | |
return self.session_interface.open_session(self, request) | |
def save_session(self, session, response): | |
"""Saves the session if it needs updates. For the default | |
implementation, check :meth:`open_session`. Instead of overriding this | |
method we recommend replacing the :class:`session_interface`. | |
.. deprecated: 1.0 | |
Will be removed in 1.1. Use ``session_interface.save_session`` | |
instead. | |
:param session: the session to be saved (a | |
:class:`~werkzeug.contrib.securecookie.SecureCookie` | |
object) | |
:param response: an instance of :attr:`response_class` | |
""" | |
warnings.warn(DeprecationWarning( | |
'"save_session" is deprecated and will be removed in 1.1. Use' | |
' "session_interface.save_session" instead.' | |
)) | |
return self.session_interface.save_session(self, session, response) | |
def make_null_session(self): | |
"""Creates a new instance of a missing session. Instead of overriding | |
this method we recommend replacing the :class:`session_interface`. | |
.. deprecated: 1.0 | |
Will be removed in 1.1. Use ``session_interface.make_null_session`` | |
instead. | |
.. versionadded:: 0.7 | |
""" | |
warnings.warn(DeprecationWarning( | |
'"make_null_session" is deprecated and will be removed in 1.1. Use' | |
' "session_interface.make_null_session" instead.' | |
)) | |
return self.session_interface.make_null_session(self) | |
    @setupmethod
    def register_blueprint(self, blueprint, **options):
        """Register a :class:`~flask.Blueprint` on the application. Keyword
        arguments passed to this method will override the defaults set on the
        blueprint.

        Calls the blueprint's :meth:`~flask.Blueprint.register` method after
        recording the blueprint in the application's :attr:`blueprints`.

        :param blueprint: The blueprint to register.
        :param url_prefix: Blueprint routes will be prefixed with this.
        :param subdomain: Blueprint routes will match on this subdomain.
        :param url_defaults: Blueprint routes will use these default values for
            view arguments.
        :param options: Additional keyword arguments are passed to
            :class:`~flask.blueprints.BlueprintSetupState`. They can be
            accessed in :meth:`~flask.Blueprint.record` callbacks.

        .. versionadded:: 0.7
        """
        first_registration = False

        if blueprint.name in self.blueprints:
            # Registering the *same* blueprint object again (e.g. under a
            # second URL prefix) is allowed; two different objects sharing
            # a name is a programming error.
            assert self.blueprints[blueprint.name] is blueprint, (
                'A name collision occurred between blueprints %r and %r. Both'
                ' share the same name "%s". Blueprints that are created on the'
                ' fly need unique names.' % (
                    blueprint, self.blueprints[blueprint.name], blueprint.name
                )
            )
        else:
            self.blueprints[blueprint.name] = blueprint
            self._blueprint_order.append(blueprint)
            first_registration = True

        blueprint.register(self, options, first_registration)
def iter_blueprints(self): | |
"""Iterates over all blueprints by the order they were registered. | |
.. versionadded:: 0.11 | |
""" | |
return iter(self._blueprint_order) | |
    @setupmethod
    def add_url_rule(self, rule, endpoint=None, view_func=None,
                     provide_automatic_options=None, **options):
        """Connects a URL rule.  Works exactly like the :meth:`route`
        decorator.  If a view_func is provided it will be registered with the
        endpoint.

        Basically this example::

            @app.route('/')
            def index():
                pass

        Is equivalent to the following::

            def index():
                pass
            app.add_url_rule('/', 'index', index)

        If the view_func is not provided you will need to connect the endpoint
        to a view function like so::

            app.view_functions['index'] = index

        Internally :meth:`route` invokes :meth:`add_url_rule` so if you want
        to customize the behavior via subclassing you only need to change
        this method.

        For more information refer to :ref:`url-route-registrations`.

        .. versionchanged:: 0.2
           `view_func` parameter added.

        .. versionchanged:: 0.6
           ``OPTIONS`` is added automatically as method.

        :param rule: the URL rule as string
        :param endpoint: the endpoint for the registered URL rule.  Flask
                         itself assumes the name of the view function as
                         endpoint
        :param view_func: the function to call when serving a request to the
                          provided endpoint
        :param provide_automatic_options: controls whether the ``OPTIONS``
            method should be added automatically. This can also be controlled
            by setting the ``view_func.provide_automatic_options = False``
            before adding the rule.
        :param options: the options to be forwarded to the underlying
                        :class:`~werkzeug.routing.Rule` object.  A change
                        to Werkzeug is handling of method options.  methods
                        is a list of methods this rule should be limited
                        to (``GET``, ``POST`` etc.).  By default a rule
                        just listens for ``GET`` (and implicitly ``HEAD``).
                        Starting with Flask 0.6, ``OPTIONS`` is implicitly
                        added and handled by the standard request handling.
        """
        if endpoint is None:
            # Default the endpoint to the view function's name.
            endpoint = _endpoint_from_view_func(view_func)
        options['endpoint'] = endpoint
        methods = options.pop('methods', None)

        # if the methods are not given and the view_func object knows its
        # methods we can use that instead.  If neither exists, we go with
        # a tuple of only ``GET`` as default.
        if methods is None:
            methods = getattr(view_func, 'methods', None) or ('GET',)
        if isinstance(methods, string_types):
            # A bare string would iterate character by character; reject it.
            raise TypeError('Allowed methods have to be iterables of strings, '
                            'for example: @app.route(..., methods=["POST"])')
        methods = set(item.upper() for item in methods)

        # Methods that should always be added
        required_methods = set(getattr(view_func, 'required_methods', ()))

        # starting with Flask 0.8 the view_func object can disable and
        # force-enable the automatic options handling.
        if provide_automatic_options is None:
            provide_automatic_options = getattr(view_func,
                'provide_automatic_options', None)

        if provide_automatic_options is None:
            # Neither argument nor view attribute decided it: enable the
            # automatic OPTIONS handling unless the view handles OPTIONS
            # itself.
            if 'OPTIONS' not in methods:
                provide_automatic_options = True
                required_methods.add('OPTIONS')
            else:
                provide_automatic_options = False

        # Add the required methods now.
        methods |= required_methods

        rule = self.url_rule_class(rule, methods=methods, **options)
        rule.provide_automatic_options = provide_automatic_options

        self.url_map.add(rule)
        if view_func is not None:
            old_func = self.view_functions.get(endpoint)
            # Re-registering the same function for an endpoint is fine;
            # silently replacing a different one is almost always a bug.
            if old_func is not None and old_func != view_func:
                raise AssertionError('View function mapping is overwriting an '
                                     'existing endpoint function: %s' % endpoint)
            self.view_functions[endpoint] = view_func
def route(self, rule, **options): | |
"""A decorator that is used to register a view function for a | |
given URL rule. This does the same thing as :meth:`add_url_rule` | |
but is intended for decorator usage:: | |
@app.route('/') | |
def index(): | |
return 'Hello World' | |
For more information refer to :ref:`url-route-registrations`. | |
:param rule: the URL rule as string | |
:param endpoint: the endpoint for the registered URL rule. Flask | |
itself assumes the name of the view function as | |
endpoint | |
:param options: the options to be forwarded to the underlying | |
:class:`~werkzeug.routing.Rule` object. A change | |
to Werkzeug is handling of method options. methods | |
is a list of methods this rule should be limited | |
to (``GET``, ``POST`` etc.). By default a rule | |
just listens for ``GET`` (and implicitly ``HEAD``). | |
Starting with Flask 0.6, ``OPTIONS`` is implicitly | |
added and handled by the standard request handling. | |
""" | |
def decorator(f): | |
endpoint = options.pop('endpoint', None) | |
self.add_url_rule(rule, endpoint, f, **options) | |
return f | |
return decorator | |
@setupmethod | |
def endpoint(self, endpoint): | |
"""A decorator to register a function as an endpoint. | |
Example:: | |
@app.endpoint('example.endpoint') | |
def example(): | |
return "example" | |
:param endpoint: the name of the endpoint | |
""" | |
def decorator(f): | |
self.view_functions[endpoint] = f | |
return f | |
return decorator | |
@staticmethod | |
def _get_exc_class_and_code(exc_class_or_code): | |
"""Ensure that we register only exceptions as handler keys""" | |
if isinstance(exc_class_or_code, integer_types): | |
exc_class = default_exceptions[exc_class_or_code] | |
else: | |
exc_class = exc_class_or_code | |
assert issubclass(exc_class, Exception) | |
if issubclass(exc_class, HTTPException): | |
return exc_class, exc_class.code | |
else: | |
return exc_class, None | |
@setupmethod | |
def errorhandler(self, code_or_exception): | |
"""Register a function to handle errors by code or exception class. | |
A decorator that is used to register a function given an | |
error code. Example:: | |
@app.errorhandler(404) | |
def page_not_found(error): | |
return 'This page does not exist', 404 | |
You can also register handlers for arbitrary exceptions:: | |
@app.errorhandler(DatabaseError) | |
def special_exception_handler(error): | |
return 'Database connection failed', 500 | |
.. versionadded:: 0.7 | |
Use :meth:`register_error_handler` instead of modifying | |
:attr:`error_handler_spec` directly, for application wide error | |
handlers. | |
.. versionadded:: 0.7 | |
One can now additionally also register custom exception types | |
that do not necessarily have to be a subclass of the | |
:class:`~werkzeug.exceptions.HTTPException` class. | |
:param code_or_exception: the code as integer for the handler, or | |
an arbitrary exception | |
""" | |
def decorator(f): | |
self._register_error_handler(None, code_or_exception, f) | |
return f | |
return decorator | |
@setupmethod | |
def register_error_handler(self, code_or_exception, f): | |
"""Alternative error attach function to the :meth:`errorhandler` | |
decorator that is more straightforward to use for non decorator | |
usage. | |
.. versionadded:: 0.7 | |
""" | |
self._register_error_handler(None, code_or_exception, f) | |
    @setupmethod
    def _register_error_handler(self, key, code_or_exception, f):
        """Internal helper for :meth:`errorhandler` and
        :meth:`register_error_handler` that stores ``f`` in
        :attr:`error_handler_spec`.

        :type key: None|str  (``None`` for app-wide handlers, otherwise a
            blueprint name)
        :type code_or_exception: int|T<=Exception
        :type f: callable
        """
        if isinstance(code_or_exception, HTTPException):  # old broken behavior
            raise ValueError(
                'Tried to register a handler for an exception instance {0!r}.'
                ' Handlers can only be registered for exception classes or'
                ' HTTP error codes.'.format(code_or_exception)
            )

        try:
            exc_class, code = self._get_exc_class_and_code(code_or_exception)
        except KeyError:
            # Integer code not present in werkzeug's default_exceptions.
            raise KeyError(
                "'{0}' is not a recognized HTTP error code. Use a subclass of"
                " HTTPException with that code instead.".format(code_or_exception)
            )

        # Handlers are stored as spec[blueprint][code][exc_class] = func.
        handlers = self.error_handler_spec.setdefault(key, {}).setdefault(code, {})
        handlers[exc_class] = f
@setupmethod | |
def template_filter(self, name=None): | |
"""A decorator that is used to register custom template filter. | |
You can specify a name for the filter, otherwise the function | |
name will be used. Example:: | |
@app.template_filter() | |
def reverse(s): | |
return s[::-1] | |
:param name: the optional name of the filter, otherwise the | |
function name will be used. | |
""" | |
def decorator(f): | |
self.add_template_filter(f, name=name) | |
return f | |
return decorator | |
@setupmethod | |
def add_template_filter(self, f, name=None): | |
"""Register a custom template filter. Works exactly like the | |
:meth:`template_filter` decorator. | |
:param name: the optional name of the filter, otherwise the | |
function name will be used. | |
""" | |
self.jinja_env.filters[name or f.__name__] = f | |
@setupmethod | |
def template_test(self, name=None): | |
"""A decorator that is used to register custom template test. | |
You can specify a name for the test, otherwise the function | |
name will be used. Example:: | |
@app.template_test() | |
def is_prime(n): | |
if n == 2: | |
return True | |
for i in range(2, int(math.ceil(math.sqrt(n))) + 1): | |
if n % i == 0: | |
return False | |
return True | |
.. versionadded:: 0.10 | |
:param name: the optional name of the test, otherwise the | |
function name will be used. | |
""" | |
def decorator(f): | |
self.add_template_test(f, name=name) | |
return f | |
return decorator | |
@setupmethod | |
def add_template_test(self, f, name=None): | |
"""Register a custom template test. Works exactly like the | |
:meth:`template_test` decorator. | |
.. versionadded:: 0.10 | |
:param name: the optional name of the test, otherwise the | |
function name will be used. | |
""" | |
self.jinja_env.tests[name or f.__name__] = f | |
@setupmethod | |
def template_global(self, name=None): | |
"""A decorator that is used to register a custom template global function. | |
You can specify a name for the global function, otherwise the function | |
name will be used. Example:: | |
@app.template_global() | |
def double(n): | |
return 2 * n | |
.. versionadded:: 0.10 | |
:param name: the optional name of the global function, otherwise the | |
function name will be used. | |
""" | |
def decorator(f): | |
self.add_template_global(f, name=name) | |
return f | |
return decorator | |
@setupmethod | |
def add_template_global(self, f, name=None): | |
"""Register a custom template global function. Works exactly like the | |
:meth:`template_global` decorator. | |
.. versionadded:: 0.10 | |
:param name: the optional name of the global function, otherwise the | |
function name will be used. | |
""" | |
self.jinja_env.globals[name or f.__name__] = f | |
@setupmethod | |
def before_request(self, f): | |
"""Registers a function to run before each request. | |
For example, this can be used to open a database connection, or to load | |
the logged in user from the session. | |
The function will be called without any arguments. If it returns a | |
non-None value, the value is handled as if it was the return value from | |
the view, and further request handling is stopped. | |
""" | |
self.before_request_funcs.setdefault(None, []).append(f) | |
return f | |
@setupmethod | |
def before_first_request(self, f): | |
"""Registers a function to be run before the first request to this | |
instance of the application. | |
The function will be called without any arguments and its return | |
value is ignored. | |
.. versionadded:: 0.8 | |
""" | |
self.before_first_request_funcs.append(f) | |
return f | |
@setupmethod | |
def after_request(self, f): | |
"""Register a function to be run after each request. | |
Your function must take one parameter, an instance of | |
:attr:`response_class` and return a new response object or the | |
same (see :meth:`process_response`). | |
As of Flask 0.7 this function might not be executed at the end of the | |
request in case an unhandled exception occurred. | |
""" | |
self.after_request_funcs.setdefault(None, []).append(f) | |
return f | |
@setupmethod | |
def teardown_request(self, f): | |
"""Register a function to be run at the end of each request, | |
regardless of whether there was an exception or not. These functions | |
are executed when the request context is popped, even if not an | |
actual request was performed. | |
Example:: | |
ctx = app.test_request_context() | |
ctx.push() | |
... | |
ctx.pop() | |
When ``ctx.pop()`` is executed in the above example, the teardown | |
functions are called just before the request context moves from the | |
stack of active contexts. This becomes relevant if you are using | |
such constructs in tests. | |
Generally teardown functions must take every necessary step to avoid | |
that they will fail. If they do execute code that might fail they | |
will have to surround the execution of these code by try/except | |
statements and log occurring errors. | |
When a teardown function was called because of an exception it will | |
be passed an error object. | |
The return values of teardown functions are ignored. | |
.. admonition:: Debug Note | |
In debug mode Flask will not tear down a request on an exception | |
immediately. Instead it will keep it alive so that the interactive | |
debugger can still access it. This behavior can be controlled | |
by the ``PRESERVE_CONTEXT_ON_EXCEPTION`` configuration variable. | |
""" | |
self.teardown_request_funcs.setdefault(None, []).append(f) | |
return f | |
@setupmethod | |
def teardown_appcontext(self, f): | |
"""Registers a function to be called when the application context | |
ends. These functions are typically also called when the request | |
context is popped. | |
Example:: | |
ctx = app.app_context() | |
ctx.push() | |
... | |
ctx.pop() | |
When ``ctx.pop()`` is executed in the above example, the teardown | |
functions are called just before the app context moves from the | |
stack of active contexts. This becomes relevant if you are using | |
such constructs in tests. | |
Since a request context typically also manages an application | |
context it would also be called when you pop a request context. | |
When a teardown function was called because of an unhandled exception | |
it will be passed an error object. If an :meth:`errorhandler` is | |
registered, it will handle the exception and the teardown will not | |
receive it. | |
The return values of teardown functions are ignored. | |
.. versionadded:: 0.9 | |
""" | |
self.teardown_appcontext_funcs.append(f) | |
return f | |
@setupmethod | |
def context_processor(self, f): | |
"""Registers a template context processor function.""" | |
self.template_context_processors[None].append(f) | |
return f | |
@setupmethod | |
def shell_context_processor(self, f): | |
"""Registers a shell context processor function. | |
.. versionadded:: 0.11 | |
""" | |
self.shell_context_processors.append(f) | |
return f | |
@setupmethod | |
def url_value_preprocessor(self, f): | |
"""Register a URL value preprocessor function for all view | |
functions in the application. These functions will be called before the | |
:meth:`before_request` functions. | |
The function can modify the values captured from the matched url before | |
they are passed to the view. For example, this can be used to pop a | |
common language code value and place it in ``g`` rather than pass it to | |
every view. | |
The function is passed the endpoint name and values dict. The return | |
value is ignored. | |
""" | |
self.url_value_preprocessors.setdefault(None, []).append(f) | |
return f | |
@setupmethod | |
def url_defaults(self, f): | |
"""Callback function for URL defaults for all view functions of the | |
application. It's called with the endpoint and values and should | |
update the values passed in place. | |
""" | |
self.url_default_functions.setdefault(None, []).append(f) | |
return f | |
    def _find_error_handler(self, e):
        """Return a registered error handler for an exception in this order:
        blueprint handler for a specific code, app handler for a specific code,
        blueprint handler for an exception class, app handler for an exception
        class, or ``None`` if a suitable handler is not found.
        """
        exc_class, code = self._get_exc_class_and_code(type(e))

        # Lookup order: (blueprint, code), (app, code),
        # (blueprint, no code), (app, no code).
        for name, c in (
            (request.blueprint, code), (None, code),
            (request.blueprint, None), (None, None)
        ):
            handler_map = self.error_handler_spec.setdefault(name, {}).get(c)

            if not handler_map:
                continue

            # Walk the exception's MRO so a handler registered for a base
            # class also catches its subclasses; the most specific class
            # wins.
            for cls in exc_class.__mro__:
                handler = handler_map.get(cls)

                if handler is not None:
                    return handler
def handle_http_exception(self, e):
    """Handles an HTTP exception.  By default this will invoke the
    registered error handlers and fall back to returning the
    exception as response.

    .. versionadded:: 0.3
    """
    # Proxy exceptions don't have error codes.  We want to always
    # return those unchanged as errors.
    if e.code is None:
        return e

    handler = self._find_error_handler(e)
    return e if handler is None else handler(e)
def trap_http_exception(self, e):
    """Checks if an HTTP exception should be trapped or not.  By default
    this will return ``False`` for all exceptions except for a bad request
    key error if ``TRAP_BAD_REQUEST_ERRORS`` is set to ``True``.  It
    also returns ``True`` if ``TRAP_HTTP_EXCEPTIONS`` is set to ``True``.

    This is called for all HTTP exceptions raised by a view function.
    If it returns ``True`` for any exception the error handler for this
    exception is not called and it shows up as regular exception in the
    traceback.  This is helpful for debugging implicitly raised HTTP
    exceptions.

    .. versionchanged:: 1.0
        Bad request errors are not trapped by default in debug mode.

    .. versionadded:: 0.8
    """
    if self.config['TRAP_HTTP_EXCEPTIONS']:
        return True

    trap_bad_request = self.config['TRAP_BAD_REQUEST_ERRORS']

    if trap_bad_request is None:
        # unset: trap bad request key errors only in debug mode
        return self.debug and isinstance(e, BadRequestKeyError)

    if trap_bad_request:
        return isinstance(e, BadRequest)

    return False
def handle_user_exception(self, e):
    """This method is called whenever an exception occurs that should be
    handled.  A special case is
    :class:`~werkzeug.exceptions.HTTPException` which is forwarded by
    this function to the :meth:`handle_http_exception` method.  This
    function will either return a response value or reraise the
    exception with the same traceback.

    .. versionchanged:: 1.0
        Key errors raised from request data like ``form`` show the bad
        key in debug mode rather than a generic bad request message.

    .. versionadded:: 0.7
    """
    exc_type, exc_value, tb = sys.exc_info()
    assert exc_value is e
    # ensure not to trash sys.exc_info() at that point in case someone
    # wants the traceback preserved in handle_http_exception.  Of course
    # we cannot prevent users from trashing it themselves in a custom
    # trap_http_exception method so that's their fault then.

    # MultiDict passes the key to the exception, but that's ignored
    # when generating the response message.  Set an informative
    # description for key errors in debug mode or when trapping errors.
    if (
        (self.debug or self.config['TRAP_BAD_REQUEST_ERRORS'])
        and isinstance(e, BadRequestKeyError)
        # only set it if it's still the default description
        and e.description is BadRequestKeyError.description
    ):
        e.description = "KeyError: '{0}'".format(*e.args)

    if isinstance(e, HTTPException) and not self.trap_http_exception(e):
        return self.handle_http_exception(e)

    handler = self._find_error_handler(e)

    if handler is None:
        # no handler registered anywhere: re-raise with the original
        # traceback so callers see the true failure point
        reraise(exc_type, exc_value, tb)
    return handler(e)
def handle_exception(self, e):
    """Default exception handling that kicks in when an exception
    occurs that is not caught.  In debug mode the exception will
    be re-raised immediately, otherwise it is logged and the handler
    for a 500 internal server error is used.  If no such handler
    exists, a default 500 internal server error message is displayed.

    .. versionadded:: 0.3
    """
    # capture the original exception info before the signal handlers
    # get a chance to run (and possibly replace it)
    exc_type, exc_value, tb = sys.exc_info()

    got_request_exception.send(self, exception=e)
    # look up a registered 500 handler (may be None)
    handler = self._find_error_handler(InternalServerError())

    if self.propagate_exceptions:
        # if we want to repropagate the exception, we can attempt to
        # raise it with the whole traceback in case we can do that
        # (the function was actually called from the except part)
        # otherwise, we just raise the error again
        if exc_value is e:
            reraise(exc_type, exc_value, tb)
        else:
            raise e

    self.log_exception((exc_type, exc_value, tb))
    if handler is None:
        return InternalServerError()
    return self.finalize_request(handler(e), from_error_handler=True)
def log_exception(self, exc_info):
    """Logs an exception.  This is called by :meth:`handle_exception`
    if debugging is disabled and right before the handler is called.
    The default implementation logs the exception as error on the
    :attr:`logger`.

    .. versionadded:: 0.8
    """
    message = 'Exception on %s [%s]' % (request.path, request.method)
    self.logger.error(message, exc_info=exc_info)
def raise_routing_exception(self, request):
    """Exceptions that are recorded during routing are reraised with
    this method.  During debug we are not reraising redirect requests
    for non ``GET``, ``HEAD``, or ``OPTIONS`` requests and we're raising
    a different error instead to help debug situations.

    :internal:
    """
    exc = request.routing_exception

    # In debug mode a redirect answered to a form-submitting method
    # would silently lose the form data, so raise a descriptive
    # debug-helper error instead.
    if (
        self.debug
        and isinstance(exc, RequestRedirect)
        and request.method not in ('GET', 'HEAD', 'OPTIONS')
    ):
        from .debughelpers import FormDataRoutingRedirect
        raise FormDataRoutingRedirect(request)

    raise exc
def dispatch_request(self):
    """Does the request dispatching.  Matches the URL and returns the
    return value of the view or error handler.  This does not have to
    be a response object.  In order to convert the return value to a
    proper response object, call :func:`make_response`.

    .. versionchanged:: 0.7
        This no longer does the exception handling, this code was
        moved to the new :meth:`full_dispatch_request`.
    """
    req = _request_ctx_stack.top.request

    if req.routing_exception is not None:
        self.raise_routing_exception(req)

    rule = req.url_rule

    # Reply automatically when the rule opted in to automatic OPTIONS
    # handling and this request came with the OPTIONS method.
    automatic = getattr(rule, 'provide_automatic_options', False)
    if automatic and req.method == 'OPTIONS':
        return self.make_default_options_response()

    # otherwise dispatch to the handler for that endpoint
    view = self.view_functions[rule.endpoint]
    return view(**req.view_args)
def full_dispatch_request(self):
    """Dispatches the request and on top of that performs request
    pre and postprocessing as well as HTTP exception catching and
    error handling.

    .. versionadded:: 0.7
    """
    self.try_trigger_before_first_request_functions()
    try:
        request_started.send(self)
        # a before_request function may short-circuit the dispatch by
        # returning a non-None value
        result = self.preprocess_request()
        if result is None:
            result = self.dispatch_request()
    except Exception as exc:
        result = self.handle_user_exception(exc)
    return self.finalize_request(result)
def finalize_request(self, rv, from_error_handler=False):
    """Given the return value from a view function this finalizes
    the request by converting it into a response and invoking the
    postprocessing functions.  This is invoked for both normal
    request dispatching as well as error handlers.

    Because this means that it might be called as a result of a
    failure a special safe mode is available which can be enabled
    with the `from_error_handler` flag.  If enabled, failures in
    response processing will be logged and otherwise ignored.

    :internal:
    """
    response = self.make_response(rv)
    try:
        response = self.process_response(response)
        request_finished.send(self, response=response)
    except Exception:
        if not from_error_handler:
            raise
        # best effort: already handling an error, so log and move on
        self.logger.exception(
            'Request finalizing failed with an error while handling an error'
        )
    return response
def try_trigger_before_first_request_functions(self):
    """Called before each request and will ensure that it triggers
    the :attr:`before_first_request_funcs` and only exactly once per
    application instance (which means process usually).

    :internal:
    """
    # fast path without taking the lock
    if self._got_first_request:
        return

    with self._before_request_lock:
        # double-checked: another thread may have won the race
        if self._got_first_request:
            return

        for hook in self.before_first_request_funcs:
            hook()

        self._got_first_request = True
def make_default_options_response(self):
    """This method is called to create the default ``OPTIONS`` response.
    This can be changed through subclassing to change the default
    behavior of ``OPTIONS`` responses.

    :return: a :attr:`response_class` instance whose ``Allow`` header
        lists the methods valid for the matched URL.

    .. versionadded:: 0.7
    """
    adapter = _request_ctx_stack.top.url_adapter

    if hasattr(adapter, 'allowed_methods'):
        methods = adapter.allowed_methods()
    else:
        # fallback for Werkzeug < 0.7: probe the adapter with an
        # impossible method so matching raises MethodNotAllowed,
        # which carries the valid methods for this URL
        methods = []
        try:
            adapter.match(method='--')
        except MethodNotAllowed as e:
            methods = e.valid_methods
        except HTTPException:
            # nothing matched at all; leave the Allow header empty
            # (no need to bind the exception -- it is unused)
            pass

    rv = self.response_class()
    rv.allow.update(methods)
    return rv
def should_ignore_error(self, error):
    """This is called to figure out if an error should be ignored
    or not as far as the teardown system is concerned.  If this
    function returns ``True`` then the teardown handlers will not be
    passed the error.

    .. versionadded:: 0.10
    """
    # The base implementation never ignores errors; subclasses may
    # override this to change that.
    return False
def make_response(self, rv):
    """Convert the return value from a view function to an instance of
    :attr:`response_class`.

    :param rv: the return value from the view function.  The view function
        must return a response.  Returning ``None``, or the view ending
        without returning, is not allowed.  The following types are allowed
        for ``rv``:

        ``str`` (``unicode`` in Python 2)
            A response object is created with the string encoded to UTF-8
            as the body.

        ``bytes`` (``str`` in Python 2)
            A response object is created with the bytes as the body.

        ``tuple``
            Either ``(body, status, headers)``, ``(body, status)``, or
            ``(body, headers)``, where ``body`` is any of the other types
            allowed here, ``status`` is a string or an integer, and
            ``headers`` is a dictionary or a list of ``(key, value)``
            tuples.  If ``body`` is a :attr:`response_class` instance,
            ``status`` overwrites the existing value and ``headers`` are
            extended.

        :attr:`response_class`
            The object is returned unchanged.

        other :class:`~werkzeug.wrappers.Response` class
            The object is coerced to :attr:`response_class`.

        :func:`callable`
            The function is called as a WSGI application.  The result is
            used to create a response object.

    .. versionchanged:: 0.9
        Previously a tuple was interpreted as the arguments for the
        response object.
    """
    status = headers = None

    # unpack tuple returns
    if isinstance(rv, tuple):
        len_rv = len(rv)

        # a 3-tuple is unpacked directly
        if len_rv == 3:
            rv, status, headers = rv
        # decide if a 2-tuple has status or headers
        elif len_rv == 2:
            if isinstance(rv[1], (Headers, dict, tuple, list)):
                rv, headers = rv
            else:
                rv, status = rv
        # other sized tuples are not allowed
        else:
            raise TypeError(
                'The view function did not return a valid response tuple.'
                ' The tuple must have the form (body, status, headers),'
                ' (body, status), or (body, headers).'
            )

    # the body must not be None
    if rv is None:
        raise TypeError(
            'The view function did not return a valid response. The'
            ' function either returned None or ended without a return'
            ' statement.'
        )

    # make sure the body is an instance of the response class
    if not isinstance(rv, self.response_class):
        if isinstance(rv, (text_type, bytes, bytearray)):
            # let the response class set the status and headers instead of
            # waiting to do it manually, so that the class can handle any
            # special logic
            rv = self.response_class(rv, status=status, headers=headers)
            status = headers = None
        else:
            # evaluate a WSGI callable, or coerce a different response
            # class to the correct type
            try:
                rv = self.response_class.force_type(rv, request.environ)
            except TypeError as e:
                new_error = TypeError(
                    '{e}\nThe view function did not return a valid'
                    ' response. The return type must be a string, tuple,'
                    ' Response instance, or WSGI callable, but it was a'
                    ' {rv.__class__.__name__}.'.format(e=e, rv=rv)
                )
                reraise(TypeError, new_error, sys.exc_info()[2])

    # prefer the status if it was provided
    if status is not None:
        if isinstance(status, (text_type, bytes, bytearray)):
            rv.status = status
        else:
            rv.status_code = status

    # extend existing headers with provided headers
    if headers:
        rv.headers.extend(headers)

    return rv
def create_url_adapter(self, request):
    """Creates a URL adapter for the given request.  The URL adapter
    is created at a point where the request context is not yet set
    up so the request is passed explicitly.

    .. versionadded:: 0.6

    .. versionchanged:: 0.9
        This can now also be called without a request object when the
        URL adapter is created for the application context.

    .. versionchanged:: 1.0
        :data:`SERVER_NAME` no longer implicitly enables subdomain
        matching.  Use :attr:`subdomain_matching` instead.
    """
    if request is not None:
        # If subdomain matching is disabled (the default), use the
        # default subdomain in all cases.  This should be the default
        # in Werkzeug but it currently does not have that feature.
        if not self.subdomain_matching:
            subdomain = self.url_map.default_subdomain or None
        else:
            subdomain = None

        return self.url_map.bind_to_environ(
            request.environ,
            server_name=self.config['SERVER_NAME'],
            subdomain=subdomain)

    # Without a request we need at the very least the server name to
    # be set for this to work.
    if self.config['SERVER_NAME'] is not None:
        return self.url_map.bind(
            self.config['SERVER_NAME'],
            script_name=self.config['APPLICATION_ROOT'],
            url_scheme=self.config['PREFERRED_URL_SCHEME'])
def inject_url_defaults(self, endpoint, values):
    """Injects the URL defaults for the given endpoint directly into
    the values dictionary passed.  This is used internally and
    automatically called on URL building.

    .. versionadded:: 0.7
    """
    # application-wide defaults first, then the blueprint's (a dotted
    # endpoint name means it belongs to a blueprint)
    funcs = list(self.url_default_functions.get(None, ()))

    if '.' in endpoint:
        blueprint = endpoint.rsplit('.', 1)[0]
        funcs.extend(self.url_default_functions.get(blueprint, ()))

    for func in funcs:
        func(endpoint, values)
def handle_url_build_error(self, error, endpoint, values):
    """Handle :class:`~werkzeug.routing.BuildError` on :meth:`url_for`.

    Each callback in :attr:`url_build_error_handlers` is tried in
    order; the first non-``None`` return value is used.  If no
    callback produces a value, the error is re-raised.

    :param error: the active :class:`~werkzeug.routing.BuildError`.
    :param endpoint: the endpoint that failed to build.
    :param values: the values passed for URL building.
    """
    # grab the exception info now so the original traceback can be
    # preserved when re-raising below
    exc_type, exc_value, tb = sys.exc_info()
    for handler in self.url_build_error_handlers:
        try:
            rv = handler(error, endpoint, values)
            if rv is not None:
                return rv
        except BuildError as e:
            # make error available outside except block (py3)
            error = e

    # At this point we want to reraise the exception.  If the error is
    # still the same one we can reraise it with the original traceback,
    # otherwise we raise it from here.
    if error is exc_value:
        reraise(exc_type, exc_value, tb)
    raise error
def preprocess_request(self):
    """Called before the request is dispatched.  Calls
    :attr:`url_value_preprocessors` registered with the app and the
    current blueprint (if any).  Then calls :attr:`before_request_funcs`
    registered with the app and the blueprint.

    If any :meth:`before_request` handler returns a non-None value, the
    value is handled as if it was the return value from the view, and
    further request handling is stopped.
    """
    bp = _request_ctx_stack.top.request.blueprint

    def registered(mapping):
        # application-wide entries (keyed by None) first, then the
        # active blueprint's entries
        result = list(mapping.get(None, ()))
        if bp is not None and bp in mapping:
            result.extend(mapping[bp])
        return result

    for preprocessor in registered(self.url_value_preprocessors):
        preprocessor(request.endpoint, request.view_args)

    for before_func in registered(self.before_request_funcs):
        rv = before_func()
        if rv is not None:
            return rv
def process_response(self, response):
    """Can be overridden in order to modify the response object
    before it's sent to the WSGI server.  By default this will
    call all the :meth:`after_request` decorated functions.

    .. versionchanged:: 0.5
        As of Flask 0.5 the functions registered for after request
        execution are called in reverse order of registration.

    :param response: a :attr:`response_class` object.
    :return: a new response object or the same, has to be an
        instance of :attr:`response_class`.
    """
    ctx = _request_ctx_stack.top
    bp = ctx.request.blueprint

    # per-request functions first, then blueprint, then app-wide;
    # the latter two in reverse order of registration
    handlers = list(ctx._after_request_functions)
    if bp is not None and bp in self.after_request_funcs:
        handlers.extend(reversed(self.after_request_funcs[bp]))
    if None in self.after_request_funcs:
        handlers.extend(reversed(self.after_request_funcs[None]))

    for handler in handlers:
        response = handler(response)

    if not self.session_interface.is_null_session(ctx.session):
        self.session_interface.save_session(self, ctx.session, response)

    return response
def do_teardown_request(self, exc=_sentinel):
    """Called after the request is dispatched and the response is
    returned, right before the request context is popped.

    This calls all functions decorated with
    :meth:`teardown_request`, and :meth:`Blueprint.teardown_request`
    if a blueprint handled the request.  Finally, the
    :data:`request_tearing_down` signal is sent.

    This is called by
    :meth:`RequestContext.pop() <flask.ctx.RequestContext.pop>`,
    which may be delayed during testing to maintain access to
    resources.

    :param exc: An unhandled exception raised while dispatching the
        request.  Detected from the current exception information if
        not passed.  Passed to each teardown function.

    .. versionchanged:: 0.9
        Added the ``exc`` argument.
    """
    if exc is _sentinel:
        exc = sys.exc_info()[1]

    # app-wide teardown functions run in reverse registration order,
    # followed by the active blueprint's (also reversed)
    teardown_funcs = list(reversed(self.teardown_request_funcs.get(None, ())))
    bp = _request_ctx_stack.top.request.blueprint
    if bp is not None and bp in self.teardown_request_funcs:
        teardown_funcs.extend(reversed(self.teardown_request_funcs[bp]))

    for teardown in teardown_funcs:
        teardown(exc)

    request_tearing_down.send(self, exc=exc)
def do_teardown_appcontext(self, exc=_sentinel):
    """Called right before the application context is popped.

    When handling a request, the application context is popped
    after the request context.  See :meth:`do_teardown_request`.

    This calls all functions decorated with
    :meth:`teardown_appcontext`.  Then the
    :data:`appcontext_tearing_down` signal is sent.

    This is called by
    :meth:`AppContext.pop() <flask.ctx.AppContext.pop>`.

    .. versionadded:: 0.9
    """
    if exc is _sentinel:
        exc = sys.exc_info()[1]

    # run in reverse order of registration
    for teardown in reversed(self.teardown_appcontext_funcs):
        teardown(exc)

    appcontext_tearing_down.send(self, exc=exc)
def app_context(self):
    """Create an :class:`~flask.ctx.AppContext`.  Use as a ``with``
    block to push the context, which will make :data:`current_app`
    point at this application.

    An application context is automatically pushed by
    :meth:`RequestContext.push() <flask.ctx.RequestContext.push>`
    when handling a request, and when running a CLI command.  Use
    this to manually create a context outside of these situations. ::

        with app.app_context():
            init_db()

    See :doc:`/appcontext`.

    .. versionadded:: 0.9
    """
    return AppContext(self)
def request_context(self, environ):
    """Create a :class:`~flask.ctx.RequestContext` representing a
    WSGI environment.  Use a ``with`` block to push the context,
    which will make :data:`request` point at this request.

    See :doc:`/reqcontext`.

    Typically you should not call this from your own code.  A request
    context is automatically pushed by the :meth:`wsgi_app` when
    handling a request.  Use :meth:`test_request_context` to create
    an environment and context instead of this method.

    :param environ: a WSGI environment
    """
    return RequestContext(self, environ)
def test_request_context(self, *args, **kwargs):
    """Create a :class:`~flask.ctx.RequestContext` for a WSGI
    environment created from the given values.  This is mostly useful
    during testing, where you may want to run a function that uses
    request data without dispatching a full request.

    See :doc:`/reqcontext`.

    Use a ``with`` block to push the context, which will make
    :data:`request` point at the request for the created
    environment. ::

        with test_request_context(...):
            generate_report()

    When using the shell, it may be easier to push and pop the
    context manually to avoid indentation. ::

        ctx = app.test_request_context(...)
        ctx.push()
        ...
        ctx.pop()

    Takes the same arguments as Werkzeug's
    :class:`~werkzeug.test.EnvironBuilder`, with some defaults from
    the application.  See the linked Werkzeug docs for most of the
    available arguments.  Flask-specific behavior is listed here.

    :param path: URL path being requested.
    :param base_url: Base URL where the app is being served, which
        ``path`` is relative to.  If not given, built from
        :data:`PREFERRED_URL_SCHEME`, ``subdomain``,
        :data:`SERVER_NAME`, and :data:`APPLICATION_ROOT`.
    :param subdomain: Subdomain name to append to
        :data:`SERVER_NAME`.
    :param url_scheme: Scheme to use instead of
        :data:`PREFERRED_URL_SCHEME`.
    :param data: The request body, either as a string or a dict of
        form keys and values.
    :param json: If given, this is serialized as JSON and passed as
        ``data``.  Also defaults ``content_type`` to
        ``application/json``.
    :param args: other positional arguments passed to
        :class:`~werkzeug.test.EnvironBuilder`.
    :param kwargs: other keyword arguments passed to
        :class:`~werkzeug.test.EnvironBuilder`.
    """
    from flask.testing import make_test_environ_builder

    builder = make_test_environ_builder(self, *args, **kwargs)

    try:
        return self.request_context(builder.get_environ())
    finally:
        # always release any resources the builder holds (e.g. files
        # opened for the request body)
        builder.close()
def wsgi_app(self, environ, start_response):
    """The actual WSGI application.  This is not implemented in
    :meth:`__call__` so that middlewares can be applied without
    losing a reference to the app object.  Instead of doing this::

        app = MyMiddleware(app)

    It's a better idea to do this instead::

        app.wsgi_app = MyMiddleware(app.wsgi_app)

    Then you still have the original application object around and
    can continue to call methods on it.

    .. versionchanged:: 0.7
        Teardown events for the request and app contexts are called
        even if an unhandled error occurs.  Other events may not be
        called depending on when an error occurs during dispatch.
        See :ref:`callbacks-and-errors`.

    :param environ: A WSGI environment.
    :param start_response: A callable accepting a status code,
        a list of headers, and an optional exception context to
        start the response.
    """
    ctx = self.request_context(environ)
    error = None
    try:
        try:
            ctx.push()
            response = self.full_dispatch_request()
        except Exception as e:
            # turn handled exceptions into an error response
            error = e
            response = self.handle_exception(e)
        except:
            # bare except: also record BaseExceptions (such as
            # KeyboardInterrupt) so teardown sees them, then re-raise
            error = sys.exc_info()[1]
            raise
        return response(environ, start_response)
    finally:
        if self.should_ignore_error(error):
            error = None
        # pop the context, passing any unhandled error along to the
        # teardown functions
        ctx.auto_pop(error)
def __call__(self, environ, start_response):
    """The WSGI server calls the Flask application object as the
    WSGI application.  This calls :meth:`wsgi_app`, which can be
    wrapped to apply middleware.
    """
    return self.wsgi_app(environ, start_response)
def __repr__(self):
    """Return a debug representation like ``<Flask 'appname'>``."""
    return '<{0} {1!r}>'.format(self.__class__.__name__, self.name)
# -*- coding: utf-8 -*- | |
""" | |
flask.blueprints | |
~~~~~~~~~~~~~~~~ | |
Blueprints are the recommended way to implement larger or more | |
pluggable applications in Flask 0.7 and later. | |
:copyright: © 2010 by the Pallets team. | |
:license: BSD, see LICENSE for more details. | |
""" | |
from functools import update_wrapper | |
from werkzeug.urls import url_join | |
from .helpers import _PackageBoundObject, _endpoint_from_view_func | |
class BlueprintSetupState(object):
    """Temporary holder object for registering a blueprint with the
    application.  An instance of this class is created by the
    :meth:`~flask.Blueprint.make_setup_state` method and later passed
    to all register callback functions.
    """

    def __init__(self, blueprint, app, options, first_registration):
        #: a reference to the current application
        self.app = app

        #: a reference to the blueprint that created this setup state.
        self.blueprint = blueprint

        #: a dictionary with all options that were passed to the
        #: :meth:`~flask.Flask.register_blueprint` method.
        self.options = options

        #: as blueprints can be registered multiple times with the
        #: application and not everything wants to be registered
        #: multiple times on it, this attribute can be used to figure
        #: out if the blueprint was registered in the past already.
        self.first_registration = first_registration

        #: The subdomain that the blueprint should be active for, ``None``
        #: otherwise.  An explicit option wins over the blueprint default.
        self.subdomain = self.options.get('subdomain')
        if self.subdomain is None:
            self.subdomain = self.blueprint.subdomain

        #: The prefix that should be used for all URLs defined on the
        #: blueprint.  An explicit option wins over the blueprint default.
        self.url_prefix = self.options.get('url_prefix')
        if self.url_prefix is None:
            self.url_prefix = self.blueprint.url_prefix

        #: A dictionary with URL defaults that is added to each and every
        #: URL that was defined with the blueprint.
        self.url_defaults = dict(self.blueprint.url_values_defaults)
        self.url_defaults.update(self.options.get('url_defaults', ()))

    def add_url_rule(self, rule, endpoint=None, view_func=None, **options):
        """A helper method to register a rule (and optionally a view function)
        to the application.  The endpoint is automatically prefixed with the
        blueprint's name.
        """
        if self.url_prefix is not None:
            if rule:
                # join prefix and rule with exactly one slash between them
                rule = '/'.join((self.url_prefix.rstrip('/'), rule.lstrip('/')))
            else:
                rule = self.url_prefix

        options.setdefault('subdomain', self.subdomain)

        if endpoint is None:
            endpoint = _endpoint_from_view_func(view_func)

        defaults = self.url_defaults
        if 'defaults' in options:
            defaults = dict(defaults, **options.pop('defaults'))

        self.app.add_url_rule(
            rule,
            '%s.%s' % (self.blueprint.name, endpoint),
            view_func,
            defaults=defaults,
            **options
        )
class Blueprint(_PackageBoundObject):
    """Represents a blueprint.  A blueprint is an object that records
    functions that will be called with the
    :class:`~flask.blueprints.BlueprintSetupState` later to register functions
    or other things on the main application.  See :ref:`blueprints` for more
    information.

    .. versionadded:: 0.7
    """

    # When True, modifying the blueprint after it was registered
    # raises a warning (see :meth:`record`).
    warn_on_modifications = False
    _got_registered_once = False

    #: Blueprint local JSON encoder class to use.
    #: Set to ``None`` to use the app's :class:`~flask.app.Flask.json_encoder`.
    json_encoder = None
    #: Blueprint local JSON decoder class to use.
    #: Set to ``None`` to use the app's :class:`~flask.app.Flask.json_decoder`.
    json_decoder = None

    # TODO remove the next three attrs when Sphinx :inherited-members: works
    # https://github.com/sphinx-doc/sphinx/issues/741

    #: The name of the package or module that this app belongs to.  Do not
    #: change this once it is set by the constructor.
    import_name = None

    #: Location of the template files to be added to the template lookup.
    #: ``None`` if templates should not be added.
    template_folder = None

    #: Absolute path to the package on the filesystem.  Used to look up
    #: resources contained in the package.
    root_path = None
def __init__(self, name, import_name, static_folder=None,
             static_url_path=None, template_folder=None,
             url_prefix=None, subdomain=None, url_defaults=None,
             root_path=None):
    """Initialize the blueprint with its name, import name, and the
    optional static/template/url configuration.
    """
    _PackageBoundObject.__init__(self, import_name, template_folder,
                                 root_path=root_path)
    self.name = name
    self.url_prefix = url_prefix
    self.subdomain = subdomain
    self.static_folder = static_folder
    self.static_url_path = static_url_path
    # functions deferred until the blueprint is registered on an app
    self.deferred_functions = []
    self.url_values_defaults = {} if url_defaults is None else url_defaults
def record(self, func):
    """Registers a function that is called when the blueprint is
    registered on the application.  This function is called with the
    state as argument as returned by the :meth:`make_setup_state`
    method.
    """
    if self._got_registered_once and self.warn_on_modifications:
        from warnings import warn
        warn(Warning(
            'The blueprint was already registered once but is getting'
            ' modified now. These changes will not show up.'
        ))
    self.deferred_functions.append(func)
def record_once(self, func):
    """Works like :meth:`record` but wraps the function in another
    function that will ensure the function is only called once.  If the
    blueprint is registered a second time on the application, the
    function passed is not called.
    """
    def once_wrapper(state):
        # only invoke on the first registration of the blueprint
        if state.first_registration:
            func(state)
    return self.record(update_wrapper(once_wrapper, func))
def make_setup_state(self, app, options, first_registration=False):
    """Creates an instance of :class:`~flask.blueprints.BlueprintSetupState`
    that is later passed to the register callback functions.
    Subclasses can override this to return a subclass of the setup state.
    """
    return BlueprintSetupState(self, app, options, first_registration)
def register(self, app, options, first_registration=False):
    """Called by :meth:`Flask.register_blueprint` to register all views
    and callbacks registered on the blueprint with the application.  Creates
    a :class:`.BlueprintSetupState` and calls each :meth:`record` callback
    with it.

    :param app: The application this blueprint is being registered with.
    :param options: Keyword arguments forwarded from
        :meth:`~Flask.register_blueprint`.
    :param first_registration: Whether this is the first time this
        blueprint has been registered on the application.
    """
    self._got_registered_once = True
    state = self.make_setup_state(app, options, first_registration)

    if self.has_static_folder:
        # expose this blueprint's static files
        state.add_url_rule(
            self.static_url_path + '/<path:filename>',
            view_func=self.send_static_file,
            endpoint='static',
        )

    for deferred in self.deferred_functions:
        deferred(state)
def route(self, rule, **options):
    """Like :meth:`Flask.route` but for a blueprint.  The endpoint for the
    :func:`url_for` function is prefixed with the name of the blueprint.
    """
    def register_view(view):
        name = options.pop("endpoint", view.__name__)
        self.add_url_rule(rule, name, view, **options)
        return view
    return register_view
def add_url_rule(self, rule, endpoint=None, view_func=None, **options):
    """Like :meth:`Flask.add_url_rule` but for a blueprint.  The endpoint for
    the :func:`url_for` function is prefixed with the name of the blueprint.
    """
    # a dot would collide with the blueprint-name prefixing scheme
    if endpoint:
        assert '.' not in endpoint, "Blueprint endpoints should not contain dots"
    if view_func and hasattr(view_func, '__name__'):
        assert '.' not in view_func.__name__, "Blueprint view function name should not contain dots"

    def deferred(state):
        state.add_url_rule(rule, endpoint, view_func, **options)

    self.record(deferred)
def endpoint(self, endpoint):
    """Like :meth:`Flask.endpoint` but for a blueprint.  This does not
    prefix the endpoint with the blueprint name, this has to be done
    explicitly by the user of this method.  If the endpoint is prefixed
    with a `.` it will be registered to the current blueprint, otherwise
    it's an application independent endpoint.
    """
    def decorator(f):
        # Defer binding until the blueprint is registered on an app.
        def bind(state):
            state.app.view_functions[endpoint] = f
        self.record_once(bind)
        return f
    return decorator
def app_template_filter(self, name=None):
    """Register a custom template filter, available application wide.  Like
    :meth:`Flask.template_filter` but for a blueprint.

    :param name: the optional name of the filter, otherwise the
                 function name will be used.
    """
    def decorator(filter_func):
        self.add_app_template_filter(filter_func, name=name)
        return filter_func
    return decorator
def add_app_template_filter(self, f, name=None):
    """Register a custom template filter, available application wide.  Like
    :meth:`Flask.add_template_filter` but for a blueprint.  Works exactly
    like the :meth:`app_template_filter` decorator.

    :param name: the optional name of the filter, otherwise the
                 function name will be used.
    """
    def install(state):
        # Fall back to the function's own name if none was given.
        state.app.jinja_env.filters[name or f.__name__] = f
    self.record_once(install)
def app_template_test(self, name=None):
    """Register a custom template test, available application wide.  Like
    :meth:`Flask.template_test` but for a blueprint.

    .. versionadded:: 0.10

    :param name: the optional name of the test, otherwise the
                 function name will be used.
    """
    def decorator(test_func):
        self.add_app_template_test(test_func, name=name)
        return test_func
    return decorator
def add_app_template_test(self, f, name=None):
    """Register a custom template test, available application wide.  Like
    :meth:`Flask.add_template_test` but for a blueprint.  Works exactly
    like the :meth:`app_template_test` decorator.

    .. versionadded:: 0.10

    :param name: the optional name of the test, otherwise the
                 function name will be used.
    """
    def install(state):
        # Fall back to the function's own name if none was given.
        state.app.jinja_env.tests[name or f.__name__] = f
    self.record_once(install)
def app_template_global(self, name=None):
    """Register a custom template global, available application wide.  Like
    :meth:`Flask.template_global` but for a blueprint.

    .. versionadded:: 0.10

    :param name: the optional name of the global, otherwise the
                 function name will be used.
    """
    def decorator(global_func):
        self.add_app_template_global(global_func, name=name)
        return global_func
    return decorator
def add_app_template_global(self, f, name=None):
    """Register a custom template global, available application wide.  Like
    :meth:`Flask.add_template_global` but for a blueprint.  Works exactly
    like the :meth:`app_template_global` decorator.

    .. versionadded:: 0.10

    :param name: the optional name of the global, otherwise the
                 function name will be used.
    """
    def install(state):
        # Fall back to the function's own name if none was given.
        state.app.jinja_env.globals[name or f.__name__] = f
    self.record_once(install)
def before_request(self, f):
    """Like :meth:`Flask.before_request` but for a blueprint.  This function
    is only executed before each request that is handled by a function of
    that blueprint.
    """
    def attach(state):
        # Keyed by blueprint name so it only runs for this blueprint.
        state.app.before_request_funcs.setdefault(self.name, []).append(f)
    self.record_once(attach)
    return f
def before_app_request(self, f):
    """Like :meth:`Flask.before_request`.  Such a function is executed
    before each request, even if outside of a blueprint.
    """
    def attach(state):
        # The None key makes the hook application-wide.
        state.app.before_request_funcs.setdefault(None, []).append(f)
    self.record_once(attach)
    return f
def before_app_first_request(self, f):
    """Like :meth:`Flask.before_first_request`.  Such a function is
    executed before the first request to the application.
    """
    def attach(state):
        state.app.before_first_request_funcs.append(f)
    self.record_once(attach)
    return f
def after_request(self, f):
    """Like :meth:`Flask.after_request` but for a blueprint.  This function
    is only executed after each request that is handled by a function of
    that blueprint.
    """
    def attach(state):
        # Keyed by blueprint name so it only runs for this blueprint.
        state.app.after_request_funcs.setdefault(self.name, []).append(f)
    self.record_once(attach)
    return f
def after_app_request(self, f):
    """Like :meth:`Flask.after_request` but for a blueprint.  Such a function
    is executed after each request, even if outside of the blueprint.
    """
    def attach(state):
        # The None key makes the hook application-wide.
        state.app.after_request_funcs.setdefault(None, []).append(f)
    self.record_once(attach)
    return f
def teardown_request(self, f):
    """Like :meth:`Flask.teardown_request` but for a blueprint.  This
    function is only executed when tearing down requests handled by a
    function of that blueprint.  Teardown request functions are executed
    when the request context is popped, even when no actual request was
    performed.
    """
    def attach(state):
        # Keyed by blueprint name so it only runs for this blueprint.
        state.app.teardown_request_funcs.setdefault(self.name, []).append(f)
    self.record_once(attach)
    return f
def teardown_app_request(self, f):
    """Like :meth:`Flask.teardown_request` but for a blueprint.  Such a
    function is executed when tearing down each request, even if outside of
    the blueprint.
    """
    def attach(state):
        # The None key makes the hook application-wide.
        state.app.teardown_request_funcs.setdefault(None, []).append(f)
    self.record_once(attach)
    return f
def context_processor(self, f):
    """Like :meth:`Flask.context_processor` but for a blueprint.  This
    function is only executed for requests handled by a blueprint.
    """
    def attach(state):
        # Keyed by blueprint name so it only runs for this blueprint.
        state.app.template_context_processors.setdefault(self.name, []).append(f)
    self.record_once(attach)
    return f
def app_context_processor(self, f):
    """Like :meth:`Flask.context_processor` but for a blueprint.  Such a
    function is executed each request, even if outside of the blueprint.
    """
    def attach(state):
        # The None key makes the processor application-wide.
        state.app.template_context_processors.setdefault(None, []).append(f)
    self.record_once(attach)
    return f
def app_errorhandler(self, code):
    """Like :meth:`Flask.errorhandler` but for a blueprint.  This
    handler is used for all requests, even if outside of the blueprint.
    """
    def decorator(f):
        def attach(state):
            # Delegate to the application's own errorhandler decorator.
            state.app.errorhandler(code)(f)
        self.record_once(attach)
        return f
    return decorator
def url_value_preprocessor(self, f):
    """Registers a function as URL value preprocessor for this
    blueprint.  It's called before the view functions are called and
    can modify the url values provided.
    """
    def attach(state):
        # Keyed by blueprint name so it only runs for this blueprint.
        state.app.url_value_preprocessors.setdefault(self.name, []).append(f)
    self.record_once(attach)
    return f
def url_defaults(self, f):
    """Callback function for URL defaults for this blueprint.  It's called
    with the endpoint and values and should update the values passed
    in place.
    """
    def attach(state):
        # Keyed by blueprint name so it only runs for this blueprint.
        state.app.url_default_functions.setdefault(self.name, []).append(f)
    self.record_once(attach)
    return f
def app_url_value_preprocessor(self, f):
    """Same as :meth:`url_value_preprocessor` but application wide.
    """
    def attach(state):
        # The None key makes the preprocessor application-wide.
        state.app.url_value_preprocessors.setdefault(None, []).append(f)
    self.record_once(attach)
    return f
def app_url_defaults(self, f):
    """Same as :meth:`url_defaults` but application wide.
    """
    def attach(state):
        # The None key makes the callback application-wide.
        state.app.url_default_functions.setdefault(None, []).append(f)
    self.record_once(attach)
    return f
def errorhandler(self, code_or_exception):
    """Registers an error handler that becomes active for this blueprint
    only.  Please be aware that routing does not happen local to a
    blueprint so an error handler for 404 usually is not handled by
    a blueprint unless it is caused inside a view function.  Another
    special case is the 500 internal server error which is always looked
    up from the application.

    Otherwise works as the :meth:`~flask.Flask.errorhandler` decorator
    of the :class:`~flask.Flask` object.
    """
    def decorator(f):
        def attach(state):
            # Register under this blueprint's name so the handler stays
            # scoped to the blueprint.
            state.app._register_error_handler(
                self.name, code_or_exception, f)
        self.record_once(attach)
        return f
    return decorator
def register_error_handler(self, code_or_exception, f):
    """Non-decorator version of the :meth:`errorhandler` error attach
    function, akin to the :meth:`~flask.Flask.register_error_handler`
    application-wide function of the :class:`~flask.Flask` object but
    for error handlers limited to this blueprint.

    .. versionadded:: 0.11
    """
    def attach(state):
        # Register under this blueprint's name so the handler stays
        # scoped to the blueprint.
        state.app._register_error_handler(
            self.name, code_or_exception, f)
    self.record_once(attach)
# -*- coding: utf-8 -*- | |
""" | |
flask.cli | |
~~~~~~~~~ | |
A simple command line application to run flask apps. | |
:copyright: © 2010 by the Pallets team. | |
:license: BSD, see LICENSE for more details. | |
""" | |
from __future__ import print_function | |
import ast | |
import inspect | |
import os | |
import re | |
import ssl | |
import sys | |
import traceback | |
from functools import update_wrapper | |
from operator import attrgetter | |
from threading import Lock, Thread | |
import click | |
from werkzeug.utils import import_string | |
from . import __version__ | |
from ._compat import getargspec, iteritems, reraise, text_type | |
from .globals import current_app | |
from .helpers import get_debug_flag, get_env, get_load_dotenv | |
try: | |
import dotenv | |
except ImportError: | |
dotenv = None | |
# Subclasses click.UsageError so click formats and prints the message for us.
class NoAppException(click.UsageError):
    """Raised if an application cannot be found or loaded."""
def find_best_app(script_info, module):
    """Given a module instance this tries to find the best possible
    application in the module or raises an exception.

    :param script_info: the ``ScriptInfo`` forwarded to app factories.
    :param module: an already-imported module object to search.
    :return: a :class:`Flask` instance.
    :raises NoAppException: if no application or factory could be resolved.
    """
    from . import Flask

    # Search for the most common names first.
    for attr_name in ('app', 'application'):
        app = getattr(module, attr_name, None)

        if isinstance(app, Flask):
            return app

    # Otherwise find the only object that is a Flask instance.
    matches = [
        v for k, v in iteritems(module.__dict__) if isinstance(v, Flask)
    ]

    if len(matches) == 1:
        return matches[0]
    elif len(matches) > 1:
        raise NoAppException(
            'Detected multiple Flask applications in module "{module}". Use '
            '"FLASK_APP={module}:name" to specify the correct '
            'one.'.format(module=module.__name__)
        )

    # Search for app factory functions.
    for attr_name in ('create_app', 'make_app'):
        app_factory = getattr(module, attr_name, None)

        if inspect.isfunction(app_factory):
            try:
                app = call_factory(script_info, app_factory)

                if isinstance(app, Flask):
                    return app
            except TypeError:
                # Only translate the TypeError when the *call itself*
                # failed; errors raised inside the factory propagate.
                if not _called_with_wrong_args(app_factory):
                    raise
                raise NoAppException(
                    'Detected factory "{factory}" in module "{module}", but '
                    'could not call it without arguments. Use '
                    '"FLASK_APP=\'{module}:{factory}(args)\'" to specify '
                    'arguments.'.format(
                        factory=attr_name, module=module.__name__
                    )
                )

    # Bug fix: the closing quote after ":name" was missing in the hint.
    raise NoAppException(
        'Failed to find Flask application or factory in module "{module}". '
        'Use "FLASK_APP={module}:name" to specify one.'.format(
            module=module.__name__
        )
    )
def call_factory(script_info, app_factory, arguments=()):
    """Takes an app factory, a ``script_info`` object and optionally a tuple
    of arguments. Checks for the existence of a script_info argument and calls
    the app_factory depending on that and the arguments provided.
    """
    args_spec = getargspec(app_factory)
    arg_names = args_spec.args
    arg_defaults = args_spec.defaults

    # Factory explicitly accepts script_info: pass it by keyword.
    if 'script_info' in arg_names:
        return app_factory(*arguments, script_info=script_info)
    # Caller supplied positional arguments: forward them as-is.
    elif arguments:
        return app_factory(*arguments)
    # Legacy form: a single required parameter (no default) gets the
    # script_info positionally.
    elif not arguments and len(arg_names) == 1 and arg_defaults is None:
        return app_factory(script_info)

    return app_factory()
def _called_with_wrong_args(factory): | |
"""Check whether calling a function raised a ``TypeError`` because | |
the call failed or because something in the factory raised the | |
error. | |
:param factory: the factory function that was called | |
:return: true if the call failed | |
""" | |
tb = sys.exc_info()[2] | |
try: | |
while tb is not None: | |
if tb.tb_frame.f_code is factory.__code__: | |
# in the factory, it was called successfully | |
return False | |
tb = tb.tb_next | |
# didn't reach the factory | |
return True | |
finally: | |
del tb | |
def find_app_by_string(script_info, module, app_name):
    """Checks if the given string is a variable name or a function. If it is a
    function, it checks for specified arguments and whether it takes a
    ``script_info`` argument and calls the function with the appropriate
    arguments.

    :param app_name: a name like ``"app"`` or a call expression like
        ``"create_app('dev')"``.
    :raises NoAppException: if parsing, calling, or resolving fails.
    """
    from flask import Flask
    # Accepts "name" or "name(arg, arg)" with optional surrounding spaces.
    match = re.match(r'^ *([^ ()]+) *(?:\((.*?) *,? *\))? *$', app_name)

    if not match:
        raise NoAppException(
            '"{name}" is not a valid variable name or function '
            'expression.'.format(name=app_name)
        )

    name, args = match.groups()

    try:
        attr = getattr(module, name)
    except AttributeError as e:
        raise NoAppException(e.args[0])

    if inspect.isfunction(attr):
        if args:
            try:
                # Wrap in a tuple literal so literal_eval handles one or
                # many comma-separated arguments uniformly.
                # Style fix: PEP 8 spacing around "as"; dropped the unused
                # "e" binding (the message never interpolates it).
                args = ast.literal_eval('({args},)'.format(args=args))
            except (ValueError, SyntaxError):
                raise NoAppException(
                    'Could not parse the arguments in '
                    '"{app_name}".'.format(app_name=app_name)
                )
        else:
            args = ()

        try:
            app = call_factory(script_info, attr, args)
        except TypeError as e:
            # Only translate the TypeError when the call itself failed;
            # errors raised inside the factory propagate unchanged.
            if not _called_with_wrong_args(attr):
                raise

            raise NoAppException(
                '{e}\nThe factory "{app_name}" in module "{module}" could not '
                'be called with the specified arguments.'.format(
                    e=e, app_name=app_name, module=module.__name__
                )
            )
    else:
        app = attr

    if isinstance(app, Flask):
        return app

    raise NoAppException(
        'A valid Flask application was not obtained from '
        '"{module}:{app_name}".'.format(
            module=module.__name__, app_name=app_name
        )
    )
def prepare_import(path):
    """Given a filename this will try to calculate the python path, add it
    to the search path and return the actual module name that is expected.
    """
    path = os.path.realpath(path)

    # Strip a trailing ".py"; the import system wants the module name.
    if os.path.splitext(path)[1] == '.py':
        path = os.path.splitext(path)[0]

    # "pkg/__init__" imports as "pkg".
    if os.path.basename(path) == '__init__':
        path = os.path.dirname(path)

    module_name = []

    # move up until outside package structure (no __init__.py)
    while True:
        path, name = os.path.split(path)
        module_name.append(name)

        if not os.path.exists(os.path.join(path, '__init__.py')):
            break

    # Make the package's parent directory importable (side effect:
    # mutates sys.path).
    if sys.path[0] != path:
        sys.path.insert(0, path)

    # Components were collected leaf-first; reverse into dotted form.
    return '.'.join(module_name[::-1])
def locate_app(script_info, module_name, app_name, raise_if_not_found=True):
    """Import ``module_name`` and return the Flask app found in it.

    :param app_name: optional ``name`` or ``factory(args)`` expression;
        when ``None`` the module is searched heuristically.
    :param raise_if_not_found: when ``False``, a plain failure to import
        ``module_name`` itself returns ``None`` instead of raising (used
        when probing for wsgi.py / app.py).
    """
    # Hint for Werkzeug's debugger to hide this frame.
    __traceback_hide__ = True

    try:
        __import__(module_name)
    except ImportError:
        # Reraise the ImportError if it occurred within the imported module.
        # Determine this by checking whether the trace has a depth > 1.
        if sys.exc_info()[-1].tb_next:
            raise NoAppException(
                'While importing "{name}", an ImportError was raised:'
                '\n\n{tb}'.format(name=module_name, tb=traceback.format_exc())
            )
        elif raise_if_not_found:
            raise NoAppException(
                'Could not import "{name}".'.format(name=module_name)
            )
        else:
            return

    module = sys.modules[module_name]

    if app_name is None:
        return find_best_app(script_info, module)
    else:
        return find_app_by_string(script_info, module, app_name)
def get_version(ctx, param, value):
    # Click eager-option callback: print Flask/Python versions and exit.
    # Do nothing when the flag is absent or the parser is in resilient
    # (e.g. shell-completion) mode.
    if not value or ctx.resilient_parsing:
        return
    template = 'Flask %(version)s\nPython %(python_version)s'
    click.echo(
        template % {'version': __version__, 'python_version': sys.version},
        color=ctx.color,
    )
    ctx.exit()
# Reusable "--version" flag.  It is eager and handled entirely by the
# get_version callback, so it works even when no application can be loaded.
version_option = click.Option(
    ['--version'],
    help='Show the flask version',
    expose_value=False,
    callback=get_version,
    is_flag=True,
    is_eager=True
)
class DispatchingApp(object):
    """Special application that dispatches to a Flask application which
    is imported by name in a background thread.  If an error happens
    it is recorded and shown as part of the WSGI handling which in case
    of the Werkzeug debugger means that it shows up in the browser.
    """

    def __init__(self, loader, use_eager_loading=False):
        # loader: zero-argument callable returning the WSGI app
        # (e.g. ScriptInfo.load_app).
        self.loader = loader
        self._app = None
        self._lock = Lock()
        # Holds sys.exc_info() from a failed background load until it can
        # be re-raised inside a WSGI request.
        self._bg_loading_exc_info = None

        if use_eager_loading:
            self._load_unlocked()
        else:
            self._load_in_background()

    def _load_in_background(self):
        # Import the app in a background thread so the server can start
        # accepting connections before the import finishes.
        def _load_app():
            __traceback_hide__ = True

            with self._lock:
                try:
                    self._load_unlocked()
                except Exception:
                    # Stash the failure; it is re-raised on the next request.
                    self._bg_loading_exc_info = sys.exc_info()

        t = Thread(target=_load_app, args=())
        t.start()

    def _flush_bg_loading_exception(self):
        __traceback_hide__ = True
        exc_info = self._bg_loading_exc_info

        if exc_info is not None:
            # Clear first so the error is only surfaced once.
            self._bg_loading_exc_info = None
            reraise(*exc_info)

    def _load_unlocked(self):
        # Caller must hold self._lock (or be in __init__ before any
        # concurrent access is possible).
        __traceback_hide__ = True
        self._app = rv = self.loader()
        self._bg_loading_exc_info = None
        return rv

    def __call__(self, environ, start_response):
        __traceback_hide__ = True

        # Fast path: app already loaded, dispatch without locking.
        if self._app is not None:
            return self._app(environ, start_response)

        # Surface any background-load failure before trying again.
        self._flush_bg_loading_exception()

        with self._lock:
            if self._app is not None:
                rv = self._app
            else:
                rv = self._load_unlocked()

        return rv(environ, start_response)
class ScriptInfo(object):
    """Help object to deal with Flask applications.  This is usually not
    necessary to interface with as it's used internally in the dispatching
    to click.  In future versions of Flask this object will most likely play
    a bigger role.  Typically it's created automatically by the
    :class:`FlaskGroup` but you can also manually create it and pass it
    onwards as click object.
    """

    def __init__(self, app_import_path=None, create_app=None):
        #: Optionally the import path for the Flask application.
        #: Falls back to the FLASK_APP environment variable.
        self.app_import_path = app_import_path or os.environ.get('FLASK_APP')
        #: Optionally a function that is passed the script info to create
        #: the instance of the application.
        self.create_app = create_app
        #: A dictionary with arbitrary data that can be associated with
        #: this script info.
        self.data = {}
        # Cache for load_app(); loading only ever happens once.
        self._loaded_app = None

    def load_app(self):
        """Loads the Flask app (if not yet loaded) and returns it.  Calling
        this multiple times will just result in the already loaded app to
        be returned.
        """
        __traceback_hide__ = True

        if self._loaded_app is not None:
            return self._loaded_app

        app = None

        if self.create_app is not None:
            # An explicit factory takes precedence over import paths.
            app = call_factory(self, self.create_app)
        else:
            if self.app_import_path:
                # "module:name" → (module, name); name may be absent.
                path, name = (self.app_import_path.split(':', 1) + [None])[:2]
                import_name = prepare_import(path)
                app = locate_app(self, import_name, name)
            else:
                # No FLASK_APP given: probe the conventional filenames.
                for path in ('wsgi.py', 'app.py'):
                    import_name = prepare_import(path)
                    app = locate_app(self, import_name, None,
                                     raise_if_not_found=False)

                    if app:
                        break

        if not app:
            raise NoAppException(
                'Could not locate a Flask application. You did not provide '
                'the "FLASK_APP" environment variable, and a "wsgi.py" or '
                '"app.py" module was not found in the current directory.'
            )

        debug = get_debug_flag()

        # Update the app's debug flag through the descriptor so that other
        # values repopulate as well.
        if debug is not None:
            app.debug = debug

        self._loaded_app = app
        return app
# Click decorator that injects the nearest ScriptInfo object (creating one
# if necessary) as the first argument of the decorated command callback.
pass_script_info = click.make_pass_decorator(ScriptInfo, ensure=True)
def with_appcontext(f):
    """Wraps a callback so that it's guaranteed to be executed with the
    script's application context.  If callbacks are registered directly
    to the ``app.cli`` object then they are wrapped with this function
    by default unless it's disabled.
    """
    @click.pass_context
    def decorator(__ctx, *args, **kwargs):
        # Load (or reuse) the app via the click context's ScriptInfo and
        # invoke the wrapped callback inside its application context.
        with __ctx.ensure_object(ScriptInfo).load_app().app_context():
            return __ctx.invoke(f, *args, **kwargs)
    return update_wrapper(decorator, f)
class AppGroup(click.Group):
    """This works similar to a regular click :class:`~click.Group` but it
    changes the behavior of the :meth:`command` decorator so that it
    automatically wraps the functions in :func:`with_appcontext`.

    Not to be confused with :class:`FlaskGroup`.
    """

    def command(self, *args, **kwargs):
        """This works exactly like the method of the same name on a regular
        :class:`click.Group` but it wraps callbacks in :func:`with_appcontext`
        unless it's disabled by passing ``with_appcontext=False``.
        """
        # Pop our extra keyword before click sees the remaining kwargs.
        wrap_for_ctx = kwargs.pop('with_appcontext', True)
        def decorator(f):
            if wrap_for_ctx:
                f = with_appcontext(f)
            return click.Group.command(self, *args, **kwargs)(f)
        return decorator

    def group(self, *args, **kwargs):
        """This works exactly like the method of the same name on a regular
        :class:`click.Group` but it defaults the group class to
        :class:`AppGroup`.
        """
        # Nested groups inherit the app-context wrapping behavior.
        kwargs.setdefault('cls', AppGroup)
        return click.Group.group(self, *args, **kwargs)
class FlaskGroup(AppGroup):
    """Special subclass of the :class:`AppGroup` group that supports
    loading more commands from the configured Flask app.  Normally a
    developer does not have to interface with this class but there are
    some very advanced use cases for which it makes sense to create an
    instance of this.

    For information as of why this is useful see :ref:`custom-scripts`.

    :param add_default_commands: if this is True then the default run and
        shell commands wil be added.
    :param add_version_option: adds the ``--version`` option.
    :param create_app: an optional callback that is passed the script info and
        returns the loaded app.
    :param load_dotenv: Load the nearest :file:`.env` and :file:`.flaskenv`
        files to set environment variables. Will also change the working
        directory to the directory containing the first file found.

    .. versionchanged:: 1.0
        If installed, python-dotenv will be used to load environment variables
        from :file:`.env` and :file:`.flaskenv` files.
    """

    def __init__(self, add_default_commands=True, create_app=None,
                 add_version_option=True, load_dotenv=True, **extra):
        params = list(extra.pop('params', None) or ())

        if add_version_option:
            params.append(version_option)

        AppGroup.__init__(self, params=params, **extra)
        self.create_app = create_app
        self.load_dotenv = load_dotenv

        if add_default_commands:
            self.add_command(run_command)
            self.add_command(shell_command)
            self.add_command(routes_command)

        self._loaded_plugin_commands = False

    def _load_plugin_commands(self):
        # Discover third-party commands registered on the "flask.commands"
        # entry point; loaded lazily and only once.
        if self._loaded_plugin_commands:
            return
        try:
            import pkg_resources
        except ImportError:
            self._loaded_plugin_commands = True
            return

        for ep in pkg_resources.iter_entry_points('flask.commands'):
            self.add_command(ep.load(), ep.name)
        self._loaded_plugin_commands = True

    def get_command(self, ctx, name):
        self._load_plugin_commands()

        # We load built-in commands first as these should always be the
        # same no matter what the app does.  If the app does want to
        # override this it needs to make a custom instance of this group
        # and not attach the default commands.
        #
        # This also means that the script stays functional in case the
        # application completely fails.
        rv = AppGroup.get_command(self, ctx, name)

        if rv is not None:
            return rv

        info = ctx.ensure_object(ScriptInfo)

        try:
            rv = info.load_app().cli.get_command(ctx, name)

            if rv is not None:
                return rv
        except NoAppException:
            # No app could be loaded; only the built-ins are available.
            pass

    def list_commands(self, ctx):
        self._load_plugin_commands()

        # The commands available is the list of both the application (if
        # available) plus the builtin commands.
        rv = set(click.Group.list_commands(self, ctx))
        info = ctx.ensure_object(ScriptInfo)

        try:
            rv.update(info.load_app().cli.list_commands(ctx))
        except Exception:
            # Here we intentionally swallow all exceptions as we don't
            # want the help page to break if the app does not exist.
            # If someone attempts to use the command we try to create
            # the app again and this will give us the error.
            # However, we will not do so silently because that would confuse
            # users.
            traceback.print_exc()
        return sorted(rv)

    def main(self, *args, **kwargs):
        # Set a global flag that indicates that we were invoked from the
        # command line interface. This is detected by Flask.run to make the
        # call into a no-op. This is necessary to avoid ugly errors when the
        # script that is loaded here also attempts to start a server.
        os.environ['FLASK_RUN_FROM_CLI'] = 'true'

        if get_load_dotenv(self.load_dotenv):
            load_dotenv()

        obj = kwargs.get('obj')

        if obj is None:
            obj = ScriptInfo(create_app=self.create_app)

        kwargs['obj'] = obj
        kwargs.setdefault('auto_envvar_prefix', 'FLASK')
        return super(FlaskGroup, self).main(*args, **kwargs)
def _path_is_ancestor(path, other): | |
"""Take ``other`` and remove the length of ``path`` from it. Then join it | |
to ``path``. If it is the original value, ``path`` is an ancestor of | |
``other``.""" | |
return os.path.join(path, other[len(path):].lstrip(os.sep)) == other | |
def load_dotenv(path=None):
    """Load "dotenv" files in order of precedence to set environment variables.

    If an env var is already set it is not overwritten, so earlier files in the
    list are preferred over later files.

    Changes the current working directory to the location of the first file
    found, with the assumption that it is in the top level project directory
    and will be where the Python path should import local packages from.

    This is a no-op if `python-dotenv`_ is not installed.

    .. _python-dotenv: https://github.com/theskumar/python-dotenv#readme

    :param path: Load the file at this location instead of searching.
    :return: ``True`` if a file was loaded.

    .. versionadded:: 1.0
    """
    if dotenv is None:
        # python-dotenv is not installed; only warn when files exist that
        # would otherwise have been loaded.
        if path or os.path.exists('.env') or os.path.exists('.flaskenv'):
            click.secho(
                ' * Tip: There are .env files present.'
                ' Do "pip install python-dotenv" to use them.',
                fg='yellow')
        return

    if path is not None:
        return dotenv.load_dotenv(path)

    new_dir = None

    # .env is checked first, so its values win over .flaskenv (dotenv does
    # not overwrite variables that are already set).
    for name in ('.env', '.flaskenv'):
        path = dotenv.find_dotenv(name, usecwd=True)

        if not path:
            continue

        # Remember the directory of the first file found; we chdir there.
        if new_dir is None:
            new_dir = os.path.dirname(path)

        dotenv.load_dotenv(path)

    if new_dir and os.getcwd() != new_dir:
        os.chdir(new_dir)

    return new_dir is not None  # at least one file was located and loaded
def show_server_banner(env, debug, app_import_path, eager_loading):
    """Show extra startup messages the first time the server is run,
    ignoring the reloader.
    """
    # Werkzeug's reloader child sets this variable; skip the banner there.
    if os.environ.get('WERKZEUG_RUN_MAIN') == 'true':
        return

    if app_import_path is not None:
        banner = ' * Serving Flask app "{0}"'.format(app_import_path)
        if not eager_loading:
            banner += ' (lazy loading)'
        click.echo(banner)

    click.echo(' * Environment: {0}'.format(env))

    if env == 'production':
        click.secho(
            '   WARNING: Do not use the development server in a production'
            ' environment.', fg='red')
        click.secho('   Use a production WSGI server instead.', dim=True)

    if debug is not None:
        click.echo(' * Debug mode: {0}'.format('on' if debug else 'off'))
class CertParamType(click.ParamType):
    """Click option type for the ``--cert`` option. Allows either an
    existing file, the string ``'adhoc'``, or an import for a
    :class:`~ssl.SSLContext` object.
    """
    name = 'path'

    def __init__(self):
        # Reused for the "existing file" interpretation of the value.
        self.path_type = click.Path(
            exists=True, dir_okay=False, resolve_path=True)

    def convert(self, value, param, ctx):
        try:
            # First interpretation: an existing certificate file.
            return self.path_type(value, param, ctx)
        except click.BadParameter:
            value = click.STRING(value, param, ctx).lower()

            if value == 'adhoc':
                try:
                    import OpenSSL
                except ImportError:
                    raise click.BadParameter(
                        'Using ad-hoc certificates requires pyOpenSSL.',
                        ctx, param)

                return value

            # Third interpretation: an import path to an SSLContext.
            obj = import_string(value, silent=True)

            if sys.version_info < (2, 7):
                if obj:
                    return obj
            else:
                if isinstance(obj, ssl.SSLContext):
                    return obj

            # Nothing matched: the bare raise re-raises the original
            # BadParameter from path_type (we are still inside its
            # except block).
            raise
def _validate_key(ctx, param, value):
    """The ``--key`` option must be specified when ``--cert`` is a file.
    Modifies the ``cert`` param to be a ``(cert, key)`` pair if needed.

    :param value: the ``--key`` path, or ``None`` if not given.
    :raises click.BadParameter: on any invalid cert/key combination.
    """
    cert = ctx.params.get('cert')
    is_adhoc = cert == 'adhoc'

    # ssl.SSLContext does not exist before Python 2.7, so fall back to a
    # duck-typed check there.
    if sys.version_info < (2, 7):
        is_context = cert and not isinstance(cert, (text_type, bytes))
    else:
        is_context = isinstance(cert, ssl.SSLContext)

    if value is not None:
        if is_adhoc:
            raise click.BadParameter(
                'When "--cert" is "adhoc", "--key" is not used.',
                ctx, param)

        if is_context:
            # Bug fix: the closing quote after "--key" was missing.
            raise click.BadParameter(
                'When "--cert" is an SSLContext object, "--key" is not used.',
                ctx, param)

        if not cert:
            raise click.BadParameter(
                '"--cert" must also be specified.',
                ctx, param)

        # A file cert plus key becomes the (cert, key) pair werkzeug expects.
        ctx.params['cert'] = cert, value
    else:
        if cert and not (is_adhoc or is_context):
            raise click.BadParameter(
                'Required when using "--cert".',
                ctx, param)

    return value
@click.command('run', short_help='Runs a development server.')
@click.option('--host', '-h', default='127.0.0.1',
              help='The interface to bind to.')
@click.option('--port', '-p', default=5000,
              help='The port to bind to.')
@click.option('--cert', type=CertParamType(),
              help='Specify a certificate file to use HTTPS.')
@click.option('--key',
              type=click.Path(exists=True, dir_okay=False, resolve_path=True),
              callback=_validate_key, expose_value=False,
              help='The key file to use when specifying a certificate.')
@click.option('--reload/--no-reload', default=None,
              help='Enable or disable the reloader.  By default the reloader '
              'is active if debug is enabled.')
@click.option('--debugger/--no-debugger', default=None,
              help='Enable or disable the debugger.  By default the debugger '
              'is active if debug is enabled.')
@click.option('--eager-loading/--lazy-loader', default=None,
              help='Enable or disable eager loading.  By default eager '
              'loading is enabled if the reloader is disabled.')
@click.option('--with-threads/--without-threads', default=True,
              help='Enable or disable multithreading.')
@pass_script_info
def run_command(info, host, port, reload, debugger, eager_loading,
                with_threads, cert):
    """Run a local development server.

    This server is for development purposes only. It does not provide
    the stability, security, or performance of production WSGI servers.

    The reloader and debugger are enabled by default if
    FLASK_ENV=development or FLASK_DEBUG=1.
    """
    # Options left at None inherit their value from the debug flag.
    debug = get_debug_flag()

    if reload is None:
        reload = debug

    if debugger is None:
        debugger = debug

    if eager_loading is None:
        # Lazy loading pairs with the reloader so import errors show up
        # in the browser instead of killing the process.
        eager_loading = not reload

    show_server_banner(get_env(), debug, info.app_import_path, eager_loading)
    app = DispatchingApp(info.load_app, use_eager_loading=eager_loading)

    from werkzeug.serving import run_simple
    run_simple(host, port, app, use_reloader=reload, use_debugger=debugger,
               threaded=with_threads, ssl_context=cert)
@click.command('shell', short_help='Runs a shell in the app context.')
@with_appcontext
def shell_command():
    """Runs an interactive Python shell in the context of a given
    Flask application.  The application will populate the default
    namespace of this shell according to its configuration.

    This is useful for executing small snippets of management code
    without having to manually configure the application.
    """
    import code
    from flask.globals import _app_ctx_stack
    app = _app_ctx_stack.top.app
    banner = 'Python %s on %s\nApp: %s [%s]\nInstance: %s' % (
        sys.version,
        sys.platform,
        app.import_name,
        app.env,
        app.instance_path,
    )
    ctx = {}

    # Support the regular Python interpreter startup script if someone
    # is using it.
    startup = os.environ.get('PYTHONSTARTUP')
    if startup and os.path.isfile(startup):
        # Execute the startup file in the shell's namespace, mirroring
        # what the plain interactive interpreter does.
        with open(startup, 'r') as f:
            eval(compile(f.read(), startup, 'exec'), ctx)

    # The app's shell context (app, g, etc.) wins over startup-file names.
    ctx.update(app.make_shell_context())

    code.interact(banner=banner, local=ctx)
@click.command('routes', short_help='Show the routes for the app.')
@click.option(
    '--sort', '-s',
    type=click.Choice(('endpoint', 'methods', 'rule', 'match')),
    default='endpoint',
    help=(
        'Method to sort routes by. "match" is the order that Flask will match '
        'routes when dispatching a request.'
    )
)
@click.option(
    '--all-methods',
    is_flag=True,
    help="Show HEAD and OPTIONS methods."
)
@with_appcontext
def routes_command(sort, all_methods):
    """Show all registered routes with endpoints and methods."""
    rules = list(current_app.url_map.iter_rules())

    if not rules:
        click.echo('No routes were registered.')
        return

    # HEAD and OPTIONS are added implicitly to most rules; hide them
    # unless the user explicitly asked for every method.
    hidden_methods = set() if all_methods else {'HEAD', 'OPTIONS'}

    # "match" keeps the url_map's dispatch order; the other keys re-sort.
    if sort == 'methods':
        rules.sort(key=lambda rule: sorted(rule.methods))
    elif sort in ('endpoint', 'rule'):
        rules.sort(key=attrgetter(sort))

    method_column = [
        ', '.join(sorted(rule.methods - hidden_methods)) for rule in rules
    ]

    headers = ('Endpoint', 'Methods', 'Rule')
    columns = (
        [rule.endpoint for rule in rules],
        method_column,
        [rule.rule for rule in rules],
    )
    # Each column is as wide as its widest cell, header included.
    widths = [
        max(len(header), max(len(cell) for cell in column))
        for header, column in zip(headers, columns)
    ]

    row = '{{0:<{0}}}  {{1:<{1}}}  {{2:<{2}}}'.format(*widths)
    click.echo(row.format(*headers).strip())
    click.echo(row.format(*('-' * width for width in widths)))

    for rule, methods in zip(rules, method_column):
        click.echo(row.format(rule.endpoint, methods, rule.rule).rstrip())
# The command line group behind the ``flask`` executable.  A FlaskGroup
# collects the built-in commands (run/shell/routes) plus any commands
# registered by the loaded application and installed extensions.
cli = FlaskGroup(help="""\
A general utility script for Flask applications.
Provides commands from Flask, extensions, and the application. Loads the
application defined in the FLASK_APP environment variable, or from a wsgi.py
file. Setting the FLASK_ENV environment variable to 'development' will enable
debug mode.
\b
{prefix}{cmd} FLASK_APP=hello.py
{prefix}{cmd} FLASK_ENV=development
{prefix}flask run
""".format(
    cmd='export' if os.name == 'posix' else 'set',
    prefix='$ ' if os.name == 'posix' else '> '
))
def main(as_module=False):
    """Entry point for the ``flask`` command line interface.

    :param as_module: ``True`` when invoked via ``python -m flask`` so
        that the displayed program name (and the command the reloader
        re-executes) matches the original invocation.
    """
    args = sys.argv[1:]

    if not as_module:
        cli.main(args=args, prog_name=None)
        return

    this_module = 'flask'
    if sys.version_info < (2, 7):
        # Ancient interpreters cannot run a package as __main__, so fall
        # back to the module path.
        this_module += '.cli'

    # Python rewrites "python -m flask" to the path to the file in argv.
    # Restore the original command so that the reloader works.
    sys.argv = ['-m', this_module] + args
    cli.main(args=args, prog_name='python -m ' + this_module)
# Allow running this module directly, e.g. ``python -m flask``.
if __name__ == '__main__':
    main(as_module=True)
# -*- coding: utf-8 -*- | |
""" | |
flask.config | |
~~~~~~~~~~~~ | |
Implements the configuration related objects. | |
:copyright: © 2010 by the Pallets team. | |
:license: BSD, see LICENSE for more details. | |
""" | |
import os | |
import types | |
import errno | |
from werkzeug.utils import import_string | |
from ._compat import string_types, iteritems | |
from . import json | |
class ConfigAttribute(object):
    """Descriptor that forwards attribute access to ``obj.config``.

    Reading looks up ``name`` in the owner instance's ``config`` mapping,
    optionally passing the stored value through ``get_converter``;
    assigning writes straight back into ``config``.
    """

    def __init__(self, name, get_converter=None):
        self.__name__ = name
        self.get_converter = get_converter

    def __get__(self, obj, type=None):
        # Accessed on the class itself: return the descriptor object.
        if obj is None:
            return self
        value = obj.config[self.__name__]
        if self.get_converter is None:
            return value
        return self.get_converter(value)

    def __set__(self, obj, value):
        obj.config[self.__name__] = value
class Config(dict):
    """Works exactly like a dict but provides ways to fill it from files
    or special dictionaries.  There are two common patterns to populate the
    config.

    Either you can fill the config from a config file::

        app.config.from_pyfile('yourconfig.cfg')

    Or alternatively you can define the configuration options in the
    module that calls :meth:`from_object` or provide an import path to
    a module that should be loaded.  It is also possible to tell it to
    use the same module and with that provide the configuration values
    just before the call::

        DEBUG = True
        SECRET_KEY = 'development key'
        app.config.from_object(__name__)

    In both cases (loading from any Python file or loading from modules),
    only uppercase keys are added to the config.  This makes it possible
    to use lowercase values in the config file for temporary values that
    are not added to the config or to define the config keys in the same
    file that implements the application.

    Probably the most interesting way to load configurations is from an
    environment variable pointing to a file::

        app.config.from_envvar('YOURAPPLICATION_SETTINGS')

    In this case before launching the application you have to set this
    environment variable to the file you want to use.  On Linux and OS X
    use the export statement::

        export YOURAPPLICATION_SETTINGS='/path/to/config/file'

    On windows use `set` instead.

    :param root_path: path to which files are read relative from.  When the
                      config object is created by the application, this is
                      the application's :attr:`~flask.Flask.root_path`.
    :param defaults: an optional dictionary of default values
    """

    def __init__(self, root_path, defaults=None):
        dict.__init__(self, defaults or {})
        self.root_path = root_path

    def from_envvar(self, variable_name, silent=False):
        """Loads a configuration from an environment variable pointing to
        a configuration file.  This is basically just a shortcut with nicer
        error messages for this line of code::

            app.config.from_pyfile(os.environ['YOURAPPLICATION_SETTINGS'])

        :param variable_name: name of the environment variable
        :param silent: set to ``True`` if you want silent failure for missing
                       files.
        :return: bool. ``True`` if able to load config, ``False`` otherwise.
        """
        rv = os.environ.get(variable_name)
        if not rv:
            if silent:
                return False
            raise RuntimeError('The environment variable %r is not set '
                               'and as such configuration could not be '
                               'loaded. Set this variable and make it '
                               'point to a configuration file' %
                               variable_name)
        return self.from_pyfile(rv, silent=silent)

    def from_pyfile(self, filename, silent=False):
        """Updates the values in the config from a Python file.  This
        function behaves as if the file was imported as module with the
        :meth:`from_object` function.

        :param filename: the filename of the config.  This can either be an
                         absolute filename or a filename relative to the
                         root path.
        :param silent: set to ``True`` if you want silent failure for missing
                       files.

        .. versionadded:: 0.7
           `silent` parameter.
        """
        filename = os.path.join(self.root_path, filename)
        # Execute the file inside a throwaway module namespace so that
        # only its (uppercase) globals reach the config.
        d = types.ModuleType('config')
        d.__file__ = filename
        try:
            with open(filename, mode='rb') as config_file:
                exec(compile(config_file.read(), filename, 'exec'), d.__dict__)
        except IOError as e:
            if silent and e.errno in (
                errno.ENOENT, errno.EISDIR, errno.ENOTDIR
            ):
                return False
            e.strerror = 'Unable to load configuration file (%s)' % e.strerror
            raise
        self.from_object(d)
        return True

    def from_object(self, obj):
        """Updates the values from the given object.  An object can be of
        one of the following two types:

        -   a string: in this case the object with that name will be imported
        -   an actual object reference: that object is used directly

        Objects are usually either modules or classes. :meth:`from_object`
        loads only the uppercase attributes of the module/class. A ``dict``
        object will not work with :meth:`from_object` because the keys of a
        ``dict`` are not attributes of the ``dict`` class.

        Example of module-based configuration::

            app.config.from_object('yourapplication.default_config')
            from yourapplication import default_config
            app.config.from_object(default_config)

        You should not use this function to load the actual configuration but
        rather configuration defaults.  The actual config should be loaded
        with :meth:`from_pyfile` and ideally from a location not within the
        package because the package might be installed system wide.

        See :ref:`config-dev-prod` for an example of class-based configuration
        using :meth:`from_object`.

        :param obj: an import name or object
        """
        if isinstance(obj, string_types):
            obj = import_string(obj)
        for key in dir(obj):
            if key.isupper():
                self[key] = getattr(obj, key)

    def from_json(self, filename, silent=False):
        """Updates the values in the config from a JSON file.  This function
        behaves as if the JSON object was a dictionary and passed to the
        :meth:`from_mapping` function.

        :param filename: the filename of the JSON file.  This can either be
                         an absolute filename or a filename relative to the
                         root path.
        :param silent: set to ``True`` if you want silent failure for missing
                       files.

        .. versionadded:: 0.11
        """
        filename = os.path.join(self.root_path, filename)

        try:
            with open(filename) as json_file:
                obj = json.loads(json_file.read())
        except IOError as e:
            # Silence ENOTDIR too (a path component is a file, not a
            # directory), so ``silent`` covers the same error set as
            # :meth:`from_pyfile`.
            if silent and e.errno in (
                errno.ENOENT, errno.EISDIR, errno.ENOTDIR
            ):
                return False
            e.strerror = 'Unable to load configuration file (%s)' % e.strerror
            raise
        return self.from_mapping(obj)

    def from_mapping(self, *mapping, **kwargs):
        """Updates the config like :meth:`update` ignoring items with
        non-upper keys.

        :raises TypeError: when more than one positional mapping is given.

        .. versionadded:: 0.11
        """
        mappings = []
        if len(mapping) == 1:
            if hasattr(mapping[0], 'items'):
                mappings.append(mapping[0].items())
            else:
                mappings.append(mapping[0])
        elif len(mapping) > 1:
            raise TypeError(
                'expected at most 1 positional argument, got %d' % len(mapping)
            )
        mappings.append(kwargs.items())
        for mapping in mappings:
            for (key, value) in mapping:
                if key.isupper():
                    self[key] = value
        return True

    def get_namespace(self, namespace, lowercase=True, trim_namespace=True):
        """Returns a dictionary containing a subset of configuration options
        that match the specified namespace/prefix.  Example usage::

            app.config['IMAGE_STORE_TYPE'] = 'fs'
            app.config['IMAGE_STORE_PATH'] = '/var/app/images'
            app.config['IMAGE_STORE_BASE_URL'] = 'http://img.website.com'
            image_store_config = app.config.get_namespace('IMAGE_STORE_')

        The resulting dictionary `image_store_config` would look like::

            {
                'type': 'fs',
                'path': '/var/app/images',
                'base_url': 'http://img.website.com'
            }

        This is often useful when configuration options map directly to
        keyword arguments in functions or class constructors.

        :param namespace: a configuration namespace
        :param lowercase: a flag indicating if the keys of the resulting
                          dictionary should be lowercase
        :param trim_namespace: a flag indicating if the keys of the resulting
                               dictionary should not include the namespace

        .. versionadded:: 0.11
        """
        rv = {}
        for k, v in iteritems(self):
            if not k.startswith(namespace):
                continue
            if trim_namespace:
                key = k[len(namespace):]
            else:
                key = k
            if lowercase:
                key = key.lower()
            rv[key] = v
        return rv

    def __repr__(self):
        return '<%s %s>' % (self.__class__.__name__, dict.__repr__(self))
# -*- coding: utf-8 -*- | |
""" | |
flask.ctx | |
~~~~~~~~~ | |
Implements the objects required to keep the context. | |
:copyright: © 2010 by the Pallets team. | |
:license: BSD, see LICENSE for more details. | |
""" | |
import sys | |
from functools import update_wrapper | |
from werkzeug.exceptions import HTTPException | |
from .globals import _request_ctx_stack, _app_ctx_stack | |
from .signals import appcontext_pushed, appcontext_popped | |
from ._compat import BROKEN_PYPY_CTXMGR_EXIT, reraise | |
# a singleton sentinel value for parameter defaults; lets callers
# distinguish "argument omitted" from an explicit ``None``
_sentinel = object()
class _AppCtxGlobals(object):
    """A plain object.  Used as a namespace for storing data during an
    application context.

    Creating an app context automatically creates this object, which is
    made available as the :data:`g` proxy.

    .. describe:: 'key' in g

        Check whether an attribute is present.

        .. versionadded:: 0.10

    .. describe:: iter(g)

        Return an iterator over the attribute names.

        .. versionadded:: 0.10
    """

    def get(self, name, default=None):
        """Return the attribute ``name``, or ``default`` when it is not
        present.  Like :meth:`dict.get`.

        :param name: Name of attribute to get.
        :param default: Value to return if the attribute is not present.

        .. versionadded:: 0.10
        """
        return self.__dict__.get(name, default)

    def pop(self, name, default=_sentinel):
        """Remove and return the attribute ``name``.  Like
        :meth:`dict.pop`: without ``default`` a missing attribute raises
        ``KeyError``.

        :param name: Name of attribute to pop.
        :param default: Value to return if the attribute is not present,
            instead of raising a ``KeyError``.

        .. versionadded:: 0.11
        """
        if default is _sentinel:
            return self.__dict__.pop(name)
        return self.__dict__.pop(name, default)

    def setdefault(self, name, default=None):
        """Return the attribute ``name``, first setting it to ``default``
        when missing.  Like :meth:`dict.setdefault`.

        :param name: Name of attribute to get.
        :param default: Value to set and return if the attribute is not
            present.

        .. versionadded:: 0.11
        """
        return self.__dict__.setdefault(name, default)

    def __contains__(self, item):
        return item in self.__dict__

    def __iter__(self):
        return iter(self.__dict__)

    def __repr__(self):
        top = _app_ctx_stack.top
        if top is None:
            return object.__repr__(self)
        return '<flask.g of %r>' % top.app.name
def after_this_request(f):
    """Executes a function after this request.  This is useful to modify
    response objects.  The function is passed the response object and has
    to return the same or a new one.

    Example::

        @app.route('/')
        def index():
            @after_this_request
            def add_header(response):
                response.headers['X-Foo'] = 'Parachute'
                return response
            return 'Hello World!'

    This is more useful if a function other than the view function wants to
    modify a response.  For instance think of a decorator that wants to add
    some headers without converting the return value into a response object.

    :raises RuntimeError: when called outside of a request context.

    .. versionadded:: 0.9
    """
    top = _request_ctx_stack.top

    # Without this guard the ``top._after_request_functions`` access below
    # would raise a cryptic AttributeError on ``None`` when no request
    # context is pushed; fail with an explicit message instead.
    if top is None:
        raise RuntimeError(
            'This decorator can only be used at local scopes '
            'when a request context is on the stack. For instance within '
            'view functions.'
        )

    top._after_request_functions.append(f)
    return f
def copy_current_request_context(f):
    """A helper function that decorates a function to retain the current
    request context.  This is useful when working with greenlets.  The
    moment the function is decorated a copy of the request context is
    created and then pushed when the function is called.

    Example::

        import gevent
        from flask import copy_current_request_context

        @app.route('/')
        def index():
            @copy_current_request_context
            def do_some_work():
                # do some work here, it can access flask.request like you
                # would otherwise in the view function.
                ...
            gevent.spawn(do_some_work)
            return 'Regular response'

    .. versionadded:: 0.10
    """
    top = _request_ctx_stack.top

    if top is None:
        raise RuntimeError('This decorator can only be used at local scopes '
            'when a request context is on the stack. For instance within '
            'view functions.')

    # Snapshot the context now, at decoration time; every later call runs
    # inside this copy.
    ctx_snapshot = top.copy()

    def run_in_context(*args, **kwargs):
        with ctx_snapshot:
            return f(*args, **kwargs)

    return update_wrapper(run_in_context, f)
def has_request_context():
    """If you have code that wants to test if a request context is there or
    not this function can be used.  For instance, you may want to take
    advantage of request information if the request object is available,
    but fail silently if it is unavailable.

    ::

        class User(db.Model):

            def __init__(self, username, remote_addr=None):
                self.username = username
                if remote_addr is None and has_request_context():
                    remote_addr = request.remote_addr
                self.remote_addr = remote_addr

    Alternatively you can also just test any of the context bound objects
    (such as :class:`request` or :class:`g` for truthness)::

        class User(db.Model):

            def __init__(self, username, remote_addr=None):
                self.username = username
                if remote_addr is None and request:
                    remote_addr = request.remote_addr
                self.remote_addr = remote_addr

    .. versionadded:: 0.7
    """
    top = _request_ctx_stack.top
    return top is not None
def has_app_context():
    """Works like :func:`has_request_context` but for the application
    context.  You can also just do a boolean check on the
    :data:`current_app` object instead.

    .. versionadded:: 0.9
    """
    top = _app_ctx_stack.top
    return top is not None
class AppContext(object):
    """The application context binds an application object implicitly
    to the current thread or greenlet, similar to how the
    :class:`RequestContext` binds request information.  The application
    context is also implicitly created if a request context is created
    but the application is not on top of the individual application
    context.
    """

    def __init__(self, app):
        self.app = app
        # URL adapter bound to no specific request (server-name based).
        self.url_adapter = app.create_url_adapter(None)
        # Fresh namespace object backing the ``g`` proxy.
        self.g = app.app_ctx_globals_class()

        # Like request context, app contexts can be pushed multiple times
        # but there a basic "refcount" is enough to track them.
        self._refcnt = 0

    def push(self):
        """Binds the app context to the current context."""
        self._refcnt += 1
        # Python 2 only: clear any lingering exception info so it does not
        # leak into the new context.
        if hasattr(sys, 'exc_clear'):
            sys.exc_clear()
        _app_ctx_stack.push(self)
        appcontext_pushed.send(self.app)

    def pop(self, exc=_sentinel):
        """Pops the app context.

        Teardown functions only run once the outermost push is popped
        (refcount reaches zero); the stack entry is removed regardless,
        even if a teardown function raises.
        """
        try:
            self._refcnt -= 1
            if self._refcnt <= 0:
                if exc is _sentinel:
                    exc = sys.exc_info()[1]
                self.app.do_teardown_appcontext(exc)
        finally:
            rv = _app_ctx_stack.pop()
        assert rv is self, 'Popped wrong app context. (%r instead of %r)' \
            % (rv, self)
        appcontext_popped.send(self.app)

    def __enter__(self):
        # Context-manager support: ``with app.app_context(): ...``
        self.push()
        return self

    def __exit__(self, exc_type, exc_value, tb):
        self.pop(exc_value)

        # Work around a PyPy quirk where exceptions raised inside a
        # context manager's __exit__ could be swallowed.
        if BROKEN_PYPY_CTXMGR_EXIT and exc_type is not None:
            reraise(exc_type, exc_value, tb)
class RequestContext(object):
    """The request context contains all request relevant information.  It is
    created at the beginning of the request and pushed to the
    `_request_ctx_stack` and removed at the end of it.  It will create the
    URL adapter and request object for the WSGI environment provided.

    Do not attempt to use this class directly, instead use
    :meth:`~flask.Flask.test_request_context` and
    :meth:`~flask.Flask.request_context` to create this object.

    When the request context is popped, it will evaluate all the
    functions registered on the application for teardown execution
    (:meth:`~flask.Flask.teardown_request`).

    The request context is automatically popped at the end of the request
    for you.  In debug mode the request context is kept around if
    exceptions happen so that interactive debuggers have a chance to
    introspect the data.  With 0.4 this can also be forced for requests
    that did not fail and outside of ``DEBUG`` mode.  By setting
    ``'flask._preserve_context'`` to ``True`` on the WSGI environment the
    context will not pop itself at the end of the request.  This is used by
    the :meth:`~flask.Flask.test_client` for example to implement the
    deferred cleanup functionality.

    You might find this helpful for unittests where you need the
    information from the context local around for a little longer.  Make
    sure to properly :meth:`~werkzeug.LocalStack.pop` the stack yourself in
    that situation, otherwise your unittests will leak memory.
    """

    def __init__(self, app, environ, request=None):
        self.app = app
        if request is None:
            request = app.request_class(environ)
        self.request = request
        self.url_adapter = app.create_url_adapter(self.request)
        # Lazily-populated list of flashed messages (see flask.helpers).
        self.flashes = None
        # Session is opened on first push, not here.
        self.session = None

        # Request contexts can be pushed multiple times and interleaved with
        # other request contexts.  Now only if the last level is popped we
        # get rid of them.  Additionally if an application context is missing
        # one is created implicitly so for each level we add this information
        self._implicit_app_ctx_stack = []

        # indicator if the context was preserved.  Next time another context
        # is pushed the preserved context is popped.
        self.preserved = False

        # remembers the exception for pop if there is one in case the context
        # preservation kicks in.
        self._preserved_exc = None

        # Functions that should be executed after the request on the response
        # object.  These will be called before the regular "after_request"
        # functions.
        self._after_request_functions = []

        self.match_request()

    def _get_g(self):
        """Forward ``ctx.g`` reads to the active application context."""
        return _app_ctx_stack.top.g

    def _set_g(self, value):
        """Forward ``ctx.g`` writes to the active application context."""
        _app_ctx_stack.top.g = value

    g = property(_get_g, _set_g)
    del _get_g, _set_g

    def copy(self):
        """Creates a copy of this request context with the same request
        object.  This can be used to move a request context to a different
        greenlet.  Because the actual request object is the same this cannot
        be used to move a request context to a different thread unless
        access to the request object is locked.

        .. versionadded:: 0.10
        """
        return self.__class__(self.app,
            environ=self.request.environ,
            request=self.request
        )

    def match_request(self):
        """Can be overridden by a subclass to hook into the matching
        of the request.
        """
        try:
            url_rule, self.request.view_args = \
                self.url_adapter.match(return_rule=True)
            self.request.url_rule = url_rule
        except HTTPException as e:
            # Routing failures (404, redirects, ...) are stored on the
            # request and re-raised later during dispatch.
            self.request.routing_exception = e

    def push(self):
        """Binds the request context to the current context."""
        # If an exception occurs in debug mode or if context preservation is
        # activated under exception situations exactly one context stays
        # on the stack.  The rationale is that you want to access that
        # information under debug situations.  However if someone forgets to
        # pop that context again we want to make sure that on the next push
        # it's invalidated, otherwise we run at risk that something leaks
        # memory.  This is usually only a problem in test suite since this
        # functionality is not active in production environments.
        top = _request_ctx_stack.top
        if top is not None and top.preserved:
            top.pop(top._preserved_exc)

        # Before we push the request context we have to ensure that there
        # is an application context.
        app_ctx = _app_ctx_stack.top
        if app_ctx is None or app_ctx.app != self.app:
            app_ctx = self.app.app_context()
            app_ctx.push()
            self._implicit_app_ctx_stack.append(app_ctx)
        else:
            self._implicit_app_ctx_stack.append(None)

        # Python 2 only: clear lingering exception info.
        if hasattr(sys, 'exc_clear'):
            sys.exc_clear()

        _request_ctx_stack.push(self)

        # Open the session at the moment that the request context is available.
        # This allows a custom open_session method to use the request context.
        # Only open a new session if this is the first time the request was
        # pushed, otherwise stream_with_context loses the session.
        if self.session is None:
            session_interface = self.app.session_interface
            self.session = session_interface.open_session(
                self.app, self.request
            )

            if self.session is None:
                self.session = session_interface.make_null_session(self.app)

    def pop(self, exc=_sentinel):
        """Pops the request context and unbinds it by doing that.  This will
        also trigger the execution of functions registered by the
        :meth:`~flask.Flask.teardown_request` decorator.

        .. versionchanged:: 0.9
           Added the `exc` argument.
        """
        app_ctx = self._implicit_app_ctx_stack.pop()

        try:
            clear_request = False
            # Teardown only runs when the outermost push of this context
            # is being popped.
            if not self._implicit_app_ctx_stack:
                self.preserved = False
                self._preserved_exc = None
                if exc is _sentinel:
                    exc = sys.exc_info()[1]
                self.app.do_teardown_request(exc)

                # If this interpreter supports clearing the exception information
                # we do that now.  This will only go into effect on Python 2.x,
                # on 3.x it disappears automatically at the end of the exception
                # stack.
                if hasattr(sys, 'exc_clear'):
                    sys.exc_clear()

                request_close = getattr(self.request, 'close', None)
                if request_close is not None:
                    request_close()
                clear_request = True
        finally:
            rv = _request_ctx_stack.pop()

            # get rid of circular dependencies at the end of the request
            # so that we don't require the GC to be active.
            if clear_request:
                rv.request.environ['werkzeug.request'] = None

            # Get rid of the app as well if necessary.
            if app_ctx is not None:
                app_ctx.pop(exc)

            assert rv is self, 'Popped wrong request context. ' \
                '(%r instead of %r)' % (rv, self)

    def auto_pop(self, exc):
        """Pop the context at the end of the request unless preservation
        was requested (test client) or an exception happened while the app
        wants contexts kept for the debugger.
        """
        if self.request.environ.get('flask._preserve_context') or \
           (exc is not None and self.app.preserve_context_on_exception):
            self.preserved = True
            self._preserved_exc = exc
        else:
            self.pop(exc)

    def __enter__(self):
        self.push()
        return self

    def __exit__(self, exc_type, exc_value, tb):
        # do not pop the request stack if we are in debug mode and an
        # exception happened.  This will allow the debugger to still
        # access the request object in the interactive shell.  Furthermore
        # the context can be force kept alive for the test client.
        # See flask.testing for how this works.
        self.auto_pop(exc_value)

        # Work around a PyPy quirk where exceptions raised inside a
        # context manager's __exit__ could be swallowed.
        if BROKEN_PYPY_CTXMGR_EXIT and exc_type is not None:
            reraise(exc_type, exc_value, tb)

    def __repr__(self):
        return '<%s \'%s\' [%s] of %s>' % (
            self.__class__.__name__,
            self.request.url,
            self.request.method,
            self.app.name,
        )
# -*- coding: utf-8 -*- | |
""" | |
flask.debughelpers | |
~~~~~~~~~~~~~~~~~~ | |
Various helpers to make the development experience better. | |
:copyright: © 2010 by the Pallets team. | |
:license: BSD, see LICENSE for more details. | |
""" | |
import os | |
from warnings import warn | |
from ._compat import implements_to_string, text_type | |
from .app import Flask | |
from .blueprints import Blueprint | |
from .globals import _request_ctx_stack | |
class UnexpectedUnicodeError(AssertionError, UnicodeError):
    """Raised in places where we want some better error reporting for
    unexpected unicode or binary data.

    Inherits from both ``AssertionError`` and ``UnicodeError`` so handlers
    for either exception type catch it.
    """
@implements_to_string
class DebugFilesKeyError(KeyError, AssertionError):
    """Raised from request.files during debugging.  The idea is that it can
    provide a better error message than just a generic KeyError/BadRequest.
    """

    def __init__(self, request, key):
        form_matches = request.form.getlist(key)
        message = (
            'You tried to access the file "%s" in the request.files '
            'dictionary but it does not exist. The mimetype for the request '
            'is "%s" instead of "multipart/form-data" which means that no '
            'file contents were transmitted. To fix this error you should '
            'provide enctype="multipart/form-data" in your form.'
            % (key, request.mimetype)
        )
        # When the field arrived as plain form data, show the submitted
        # file names to make the mistake obvious.
        if form_matches:
            submitted = ', '.join('"%s"' % x for x in form_matches)
            message += (
                '\n\nThe browser instead transmitted some file names. '
                'This was submitted: %s' % submitted
            )
        self.msg = message

    def __str__(self):
        return self.msg
class FormDataRoutingRedirect(AssertionError):
    """This exception is raised by Flask in debug mode if it detects a
    redirect caused by the routing system when the request method is not
    GET, HEAD or OPTIONS.  Reasoning: form data will be dropped.
    """

    def __init__(self, request):
        # The redirect the routing system produced (e.g. RequestRedirect).
        exc = request.routing_exception
        buf = ['A request was sent to this URL (%s) but a redirect was '
            'issued automatically by the routing system to "%s".'
            % (request.url, exc.new_url)]

        # In case just a slash was appended we can be extra helpful
        if request.base_url + '/' == exc.new_url.split('?')[0]:
            buf.append(' The URL was defined with a trailing slash so '
                'Flask will automatically redirect to the URL '
                'with the trailing slash if it was accessed '
                'without one.')

        buf.append(' Make sure to directly send your %s-request to this URL '
            'since we can\'t make browsers or HTTP clients redirect '
            'with form data reliably or without user interaction.' %
            request.method)
        buf.append('\n\nNote: this exception is only raised in debug mode')
        # NOTE(review): the message is encoded to bytes here — presumably
        # for Python 2 ``str`` compatibility; confirm before changing.
        AssertionError.__init__(self, ''.join(buf).encode('utf-8'))
def attach_enctype_error_multidict(request):
    """Since Flask 0.8 we're monkeypatching the files object in case a
    request is detected that does not use multipart form data but the files
    object is accessed.
    """
    file_cls = request.files.__class__

    class newcls(file_cls):
        def __getitem__(self, key):
            try:
                return file_cls.__getitem__(self, key)
            except KeyError:
                # Only upgrade the error when the key *was* submitted as
                # regular form data — that is the tell-tale of a missing
                # multipart enctype.
                if key not in request.form:
                    raise
                raise DebugFilesKeyError(request, key)

    # Masquerade as the original class so reprs/debugging stay familiar.
    newcls.__name__ = file_cls.__name__
    newcls.__module__ = file_cls.__module__
    request.files.__class__ = newcls
def _dump_loader_info(loader):
    """Yield human-readable lines describing a template loader's public,
    string-representable attributes.
    """
    yield 'class: %s.%s' % (type(loader).__module__, type(loader).__name__)

    for key, value in sorted(loader.__dict__.items()):
        # Private attributes are not interesting to the developer.
        if key.startswith('_'):
            continue
        if isinstance(value, (tuple, list)):
            # Only render sequences whose items are all strings.
            if all(isinstance(x, (str, text_type)) for x in value):
                yield '%s:' % key
                for item in value:
                    yield ' - %s' % item
        elif isinstance(value, (str, text_type, int, float, bool)):
            yield '%s: %r' % (key, value)
def explain_template_loading_attempts(app, template, attempts):
    """This should help developers understand what failed

    :param app: the application the lookup ran against.
    :param template: the template name that was looked up.
    :param attempts: iterable of ``(loader, srcobj, triple)`` entries, one
        per loader that was tried; ``triple`` is ``None`` on a miss.
    """
    info = ['Locating template "%s":' % template]
    total_found = 0
    blueprint = None
    # If the lookup happened while handling a blueprint endpoint, remember
    # the blueprint for the hints at the end.
    reqctx = _request_ctx_stack.top
    if reqctx is not None and reqctx.request.blueprint is not None:
        blueprint = reqctx.request.blueprint

    for idx, (loader, srcobj, triple) in enumerate(attempts):
        # Describe where the loader came from (app, blueprint, or other).
        if isinstance(srcobj, Flask):
            src_info = 'application "%s"' % srcobj.import_name
        elif isinstance(srcobj, Blueprint):
            src_info = 'blueprint "%s" (%s)' % (srcobj.name,
                srcobj.import_name)
        else:
            src_info = repr(srcobj)

        info.append('% 5d: trying loader of %s' % (
            idx + 1, src_info))

        for line in _dump_loader_info(loader):
            info.append(' %s' % line)

        if triple is None:
            detail = 'no match'
        else:
            detail = 'found (%r)' % (triple[1] or '<string>')
            total_found += 1
        info.append(' -> %s' % detail)

    # Zero hits means a real failure; multiple hits mean the wrong
    # template may have been picked up.
    seems_fishy = False
    if total_found == 0:
        info.append('Error: the template could not be found.')
        seems_fishy = True
    elif total_found > 1:
        info.append('Warning: multiple loaders returned a match for the template.')
        seems_fishy = True

    if blueprint is not None and seems_fishy:
        info.append(' The template was looked up from an endpoint that '
            'belongs to the blueprint "%s".' % blueprint)
        info.append(' Maybe you did not place a template in the right folder?')
        info.append(' See http://flask.pocoo.org/docs/blueprints/#templates')

    app.logger.info('\n'.join(info))
def explain_ignored_app_run():
    """Emit a warning that ``app.run()`` is being silently skipped.

    When the reloader's main process is active (``WERKZEUG_RUN_MAIN``
    is ``'true'``) nothing happens; otherwise a :class:`Warning` is
    issued pointing at the caller two frames up.
    """
    if os.environ.get('WERKZEUG_RUN_MAIN') == 'true':
        return
    message = ('Silently ignoring app.run() because the '
               'application is run from the flask command line '
               'executable. Consider putting app.run() behind an '
               'if __name__ == "__main__" guard to silence this '
               'warning.')
    warn(Warning(message), stacklevel=3)
# -*- coding: utf-8 -*- | |
""" | |
flask.globals | |
~~~~~~~~~~~~~ | |
Defines all the global objects that are proxies to the current | |
active context. | |
:copyright: © 2010 by the Pallets team. | |
:license: BSD, see LICENSE for more details. | |
""" | |
from functools import partial | |
from werkzeug.local import LocalStack, LocalProxy | |
# Error message raised by the lookup helpers below when a request-bound
# proxy (``request``, ``session``) is touched with no request context
# pushed.
_request_ctx_err_msg = '''\
Working outside of request context.
This typically means that you attempted to use functionality that needed
an active HTTP request. Consult the documentation on testing for
information about how to avoid this problem.\
'''
# Error message raised when an application-bound proxy (``current_app``,
# ``g``) is touched with no application context pushed.
_app_ctx_err_msg = '''\
Working outside of application context.
This typically means that you attempted to use functionality that needed
to interface with the current application object in some way. To solve
this, set up an application context with app.app_context(). See the
documentation for more information.\
'''
def _lookup_req_object(name):
    """Fetch attribute *name* from the topmost request context.

    :raises RuntimeError: when no request context has been pushed.
    """
    ctx = _request_ctx_stack.top
    if ctx is None:
        raise RuntimeError(_request_ctx_err_msg)
    return getattr(ctx, name)
def _lookup_app_object(name):
    """Fetch attribute *name* from the topmost application context.

    :raises RuntimeError: when no application context has been pushed.
    """
    ctx = _app_ctx_stack.top
    if ctx is None:
        raise RuntimeError(_app_ctx_err_msg)
    return getattr(ctx, name)
def _find_app():
    """Return the application bound to the topmost application context.

    :raises RuntimeError: when no application context has been pushed.
    """
    ctx = _app_ctx_stack.top
    if ctx is None:
        raise RuntimeError(_app_ctx_err_msg)
    return ctx.app
# context locals
# The werkzeug context-local stacks hold the pushed request/application
# contexts; the proxies below resolve against the top of the matching
# stack on every access and raise RuntimeError (via the lookup helpers
# above) when nothing is pushed.
_request_ctx_stack = LocalStack()
_app_ctx_stack = LocalStack()
# Proxy to the application of the current application context.
current_app = LocalProxy(_find_app)
# Proxies to the ``request``/``session`` attributes of the current
# request context.
request = LocalProxy(partial(_lookup_req_object, 'request'))
session = LocalProxy(partial(_lookup_req_object, 'session'))
# Proxy to the ``g`` attribute of the current application context.
g = LocalProxy(partial(_lookup_app_object, 'g'))
# -*- coding: utf-8 -*- | |
""" | |
flask.helpers | |
~~~~~~~~~~~~~ | |
Implements various helpers. | |
:copyright: © 2010 by the Pallets team. | |
:license: BSD, see LICENSE for more details. | |
""" | |
import os | |
import socket | |
import sys | |
import pkgutil | |
import posixpath | |
import mimetypes | |
from time import time | |
from zlib import adler32 | |
from threading import RLock | |
import unicodedata | |
from werkzeug.routing import BuildError | |
from functools import update_wrapper | |
from werkzeug.urls import url_quote | |
from werkzeug.datastructures import Headers, Range | |
from werkzeug.exceptions import BadRequest, NotFound, \ | |
RequestedRangeNotSatisfiable | |
from werkzeug.wsgi import wrap_file | |
from jinja2 import FileSystemLoader | |
from .signals import message_flashed | |
from .globals import session, _request_ctx_stack, _app_ctx_stack, \ | |
current_app, request | |
from ._compat import string_types, text_type, PY2 | |
# sentinel
# Unique marker object used to tell "no value cached yet" apart from a
# legitimately cached ``None`` (see ``locked_cached_property``).
_missing = object()
# what separators does this operating system provide that are not a slash?
# this is used by safe_join (and thus send_from_directory) to ensure that
# nobody is able to access files from outside the filesystem.
_os_alt_seps = list(sep for sep in [os.path.sep, os.path.altsep]
                    if sep not in (None, '/'))
def get_env():
    """Return the environment the app runs in, as given by the
    :envvar:`FLASK_ENV` environment variable; ``'production'`` when the
    variable is unset or empty.
    """
    env = os.environ.get('FLASK_ENV')
    if env:
        return env
    return 'production'
def get_debug_flag():
    """Return whether debug mode should be enabled, as indicated by the
    :envvar:`FLASK_DEBUG` environment variable.  Defaults to ``True``
    when :func:`.get_env` returns ``'development'``, ``False`` otherwise.
    """
    raw = os.environ.get('FLASK_DEBUG')
    if raw:
        # Anything except an explicit "off" string enables debug mode.
        return raw.lower() not in ('0', 'false', 'no')
    # Unset/empty: fall back to the environment name.
    return get_env() == 'development'
def get_load_dotenv(default=True):
    """Return whether dotenv files should be loaded, honoring the
    :envvar:`FLASK_SKIP_DOTENV` environment variable.

    :param default: What to return if the env var isn't set.
    """
    raw = os.environ.get('FLASK_SKIP_DOTENV')
    if not raw:
        return default
    # The variable means "skip", so an explicit off-string means "load".
    return raw.lower() in ('0', 'false', 'no')
def _endpoint_from_view_func(view_func): | |
"""Internal helper that returns the default endpoint for a given | |
function. This always is the function name. | |
""" | |
assert view_func is not None, 'expected view func if endpoint ' \ | |
'is not provided.' | |
return view_func.__name__ | |
def stream_with_context(generator_or_function):
    """Request contexts disappear when the response is started on the server.
    This is done for efficiency reasons and to make it less likely to encounter
    memory leaks with badly written WSGI middlewares. The downside is that if
    you are using streamed responses, the generator cannot access request bound
    information any more.

    This function however can help you keep the context around for longer::

        from flask import stream_with_context, request, Response

        @app.route('/stream')
        def streamed_response():
            @stream_with_context
            def generate():
                yield 'Hello '
                yield request.args['name']
                yield '!'
            return Response(generate())

    Alternatively it can also be used around a specific generator::

        from flask import stream_with_context, request, Response

        @app.route('/stream')
        def streamed_response():
            def generate():
                yield 'Hello '
                yield request.args['name']
                yield '!'
            return Response(stream_with_context(generate()))

    .. versionadded:: 0.9
    """
    try:
        gen = iter(generator_or_function)
    except TypeError:
        # Not iterable: we were applied to a generator *function* as a
        # decorator.  Defer: wrap the call, then recurse on the produced
        # generator object.
        def decorator(*args, **kwargs):
            gen = generator_or_function(*args, **kwargs)
            return stream_with_context(gen)
        return update_wrapper(decorator, generator_or_function)

    def generator():
        # Capture the request context that is current *now* (at wrapping
        # time), and re-enter it for the duration of the iteration.
        ctx = _request_ctx_stack.top
        if ctx is None:
            raise RuntimeError('Attempted to stream with context but '
                'there was no context in the first place to keep around.')
        with ctx:
            # Dummy sentinel.  Has to be inside the context block or we're
            # not actually keeping the context around.
            yield None

            # The try/finally is here so that if someone passes a WSGI level
            # iterator in we're still running the cleanup logic.  Generators
            # don't need that because they are closed on their destruction
            # automatically.
            try:
                for item in gen:
                    yield item
            finally:
                if hasattr(gen, 'close'):
                    gen.close()

    # The trick is to start the generator.  Then the code execution runs until
    # the first dummy None is yielded at which point the context was already
    # pushed.  This item is discarded.  Then when the iteration continues the
    # real generator is executed.
    wrapped_g = generator()
    next(wrapped_g)
    return wrapped_g
def make_response(*args):
    """Convert the given arguments into a response object so that extra
    headers can be attached before returning from a view::

        def index():
            response = make_response(render_template('index.html', foo=42))
            response.headers['X-Parachutes'] = 'parachutes are cool'
            return response

    It accepts the very same arguments a view function may return; for
    example a status code can be supplied alongside the body::

        response = make_response(render_template('not_found.html'), 404)

    It is also handy for forcing the return value of a decorated view
    function into a real response object.

    Internally:

    - with no arguments, a fresh instance of the application's
      :attr:`~flask.Flask.response_class` is created;
    - with one argument, that value is handed to
      :meth:`flask.Flask.make_response`;
    - with several arguments, they are handed over as a tuple.

    .. versionadded:: 0.6
    """
    if len(args) == 0:
        return current_app.response_class()
    if len(args) == 1:
        return current_app.make_response(args[0])
    return current_app.make_response(args)
def url_for(endpoint, **values):
    """Generates a URL to the given endpoint with the method provided.

    Variable arguments that are unknown to the target endpoint are appended
    to the generated URL as query arguments.  If the value of a query argument
    is ``None``, the whole pair is skipped.  In case blueprints are active
    you can shortcut references to the same blueprint by prefixing the
    local endpoint with a dot (``.``).

    This will reference the index function local to the current blueprint::

        url_for('.index')

    For more information, head over to the :ref:`Quickstart <url-building>`.

    To integrate applications, :class:`Flask` has a hook to intercept URL build
    errors through :attr:`Flask.url_build_error_handlers`.  The `url_for`
    function results in a :exc:`~werkzeug.routing.BuildError` when the current
    app does not have a URL for the given endpoint and values.  When it does, the
    :data:`~flask.current_app` calls its :attr:`~Flask.url_build_error_handlers` if
    it is not ``None``, which can return a string to use as the result of
    `url_for` (instead of `url_for`'s default to raise the
    :exc:`~werkzeug.routing.BuildError` exception) or re-raise the exception.
    An example::

        def external_url_handler(error, endpoint, values):
            "Looks up an external URL when `url_for` cannot build a URL."
            # This is an example of hooking the build_error_handler.
            # Here, lookup_url is some utility function you've built
            # which looks up the endpoint in some external URL registry.
            url = lookup_url(endpoint, **values)
            if url is None:
                # External lookup did not have a URL.
                # Re-raise the BuildError, in context of original traceback.
                exc_type, exc_value, tb = sys.exc_info()
                if exc_value is error:
                    raise exc_type, exc_value, tb
                else:
                    raise error
            # url_for will use this result, instead of raising BuildError.
            return url

        app.url_build_error_handlers.append(external_url_handler)

    Here, `error` is the instance of :exc:`~werkzeug.routing.BuildError`, and
    `endpoint` and `values` are the arguments passed into `url_for`.  Note
    that this is for building URLs outside the current application, and not for
    handling 404 NotFound errors.

    .. versionadded:: 0.10
       The `_scheme` parameter was added.

    .. versionadded:: 0.9
       The `_anchor` and `_method` parameters were added.

    .. versionadded:: 0.9
       Calls :meth:`Flask.handle_build_error` on
       :exc:`~werkzeug.routing.BuildError`.

    :param endpoint: the endpoint of the URL (name of the function)
    :param values: the variable arguments of the URL rule
    :param _external: if set to ``True``, an absolute URL is generated. Server
      address can be changed via ``SERVER_NAME`` configuration variable which
      defaults to `localhost`.
    :param _scheme: a string specifying the desired URL scheme. The `_external`
      parameter must be set to ``True`` or a :exc:`ValueError` is raised. The default
      behavior uses the same scheme as the current request, or
      ``PREFERRED_URL_SCHEME`` from the :ref:`app configuration <config>` if no
      request context is available. As of Werkzeug 0.10, this also can be set
      to an empty string to build protocol-relative URLs.
    :param _anchor: if provided this is added as anchor to the URL.
    :param _method: if provided this explicitly specifies an HTTP method.
    """
    appctx = _app_ctx_stack.top
    reqctx = _request_ctx_stack.top

    # URL building always needs at least an application context; a
    # request context merely adds relative-URL conveniences below.
    if appctx is None:
        raise RuntimeError(
            'Attempted to generate a URL without the application context being'
            ' pushed. This has to be executed when application context is'
            ' available.'
        )

    # If request specific information is available we have some extra
    # features that support "relative" URLs.
    if reqctx is not None:
        url_adapter = reqctx.url_adapter
        blueprint_name = request.blueprint

        # A leading dot means "endpoint relative to the current
        # blueprint"; without an active blueprint the dot is stripped.
        if endpoint[:1] == '.':
            if blueprint_name is not None:
                endpoint = blueprint_name + endpoint
            else:
                endpoint = endpoint[1:]

        external = values.pop('_external', False)

    # Otherwise go with the url adapter from the appctx and make
    # the URLs external by default.
    else:
        url_adapter = appctx.url_adapter

        if url_adapter is None:
            raise RuntimeError(
                'Application was not able to create a URL adapter for request'
                ' independent URL generation. You might be able to fix this by'
                ' setting the SERVER_NAME config variable.'
            )

        external = values.pop('_external', True)

    # Reserved keyword arguments are popped before the remaining values
    # are handed to the adapter / URL defaults machinery.
    anchor = values.pop('_anchor', None)
    method = values.pop('_method', None)
    scheme = values.pop('_scheme', None)

    appctx.app.inject_url_defaults(endpoint, values)

    # This is not the best way to deal with this but currently the
    # underlying Werkzeug router does not support overriding the scheme on
    # a per build call basis.
    old_scheme = None
    if scheme is not None:
        if not external:
            raise ValueError('When specifying _scheme, _external must be True')

        old_scheme = url_adapter.url_scheme
        url_adapter.url_scheme = scheme

    try:
        try:
            rv = url_adapter.build(endpoint, values, method=method,
                                   force_external=external)
        finally:
            # Always restore the adapter's original scheme, even when
            # the build raised.
            if old_scheme is not None:
                url_adapter.url_scheme = old_scheme
    except BuildError as error:
        # We need to inject the values again so that the app callback can
        # deal with that sort of stuff.
        values['_external'] = external
        values['_anchor'] = anchor
        values['_method'] = method
        values['_scheme'] = scheme
        return appctx.app.handle_url_build_error(error, endpoint, values)

    if anchor is not None:
        rv += '#' + url_quote(anchor)
    return rv
def get_template_attribute(template_name, attribute):
    """Load a macro (or variable) that a template exports, so it can be
    invoked from Python code.

    Given a template :file:`_cider.html` containing::

        {% macro hello(name) %}Hello {{ name }}!{% endmacro %}

    the macro is reachable like this::

        hello = get_template_attribute('_cider.html', 'hello')
        return hello('World')

    .. versionadded:: 0.2

    :param template_name: the name of the template
    :param attribute: the name of the variable or macro to access
    """
    module = current_app.jinja_env.get_template(template_name).module
    return getattr(module, attribute)
def flash(message, category='message'):
    """Flash a message to the next request.  To remove the flashed
    message from the session and display it, the template must call
    :func:`get_flashed_messages`.

    .. versionchanged:: 0.3
       `category` parameter added.

    :param message: the message to be flashed.
    :param category: the category for the message.  Recommended values
                     are ``'message'``, ``'error'``, ``'info'`` and
                     ``'warning'``, but any string may be used.
    """
    # Read-modify-write the whole list instead of mutating it in place
    # (session.setdefault('_flashes', []).append(...)): session
    # implementations that keep their keys/values in external storage
    # would otherwise never see the change.
    pending = session.get('_flashes', [])
    pending.append((category, message))
    session['_flashes'] = pending
    message_flashed.send(current_app._get_current_object(),
                         message=message, category=category)
def get_flashed_messages(with_categories=False, category_filter=[]):
    """Pull all flashed messages from the session and return them;
    further calls within the same request return the same messages.

    By default just the message texts are returned; with
    `with_categories=True` the result is a list of ``(category,
    message)`` tuples instead.  `category_filter` limits the result to
    the given categories; the two arguments are independent of each
    other.  See :ref:`message-flashing-pattern` for examples.

    .. versionchanged:: 0.3
       `with_categories` parameter added.

    .. versionchanged:: 0.9
       `category_filter` parameter added.

    :param with_categories: set to ``True`` to also receive categories.
    :param category_filter: whitelist of categories to limit return values
    """
    flashes = _request_ctx_stack.top.flashes
    if flashes is None:
        # First access in this request: move the messages out of the
        # session and cache them on the request context.
        if '_flashes' in session:
            flashes = session.pop('_flashes')
        else:
            flashes = []
        _request_ctx_stack.top.flashes = flashes
    if category_filter:
        flashes = [f for f in flashes if f[0] in category_filter]
    if with_categories:
        return flashes
    return [f[1] for f in flashes]
def send_file(filename_or_fp, mimetype=None, as_attachment=False,
              attachment_filename=None, add_etags=True,
              cache_timeout=None, conditional=False, last_modified=None):
    """Sends the contents of a file to the client.  This will use the
    most efficient method available and configured.  By default it will
    try to use the WSGI server's file_wrapper support.  Alternatively
    you can set the application's :attr:`~Flask.use_x_sendfile` attribute
    to ``True`` to directly emit an ``X-Sendfile`` header.  This however
    requires support of the underlying webserver for ``X-Sendfile``.

    By default it will try to guess the mimetype for you, but you can
    also explicitly provide one.  For extra security you probably want
    to send certain files as attachment (HTML for instance).  The mimetype
    guessing requires a `filename` or an `attachment_filename` to be
    provided.

    ETags will also be attached automatically if a `filename` is provided. You
    can turn this off by setting `add_etags=False`.

    If `conditional=True` and `filename` is provided, this method will try to
    upgrade the response stream to support range requests.  This will allow
    the request to be answered with partial content response.

    Please never pass filenames to this function from user sources;
    you should use :func:`send_from_directory` instead.

    .. versionadded:: 0.2

    .. versionadded:: 0.5
       The `add_etags`, `cache_timeout` and `conditional` parameters were
       added.  The default behavior is now to attach etags.

    .. versionchanged:: 0.7
       mimetype guessing and etag support for file objects was
       deprecated because it was unreliable.  Pass a filename if you are
       able to, otherwise attach an etag yourself.  This functionality
       will be removed in Flask 1.0

    .. versionchanged:: 0.9
       cache_timeout pulls its default from application config, when None.

    .. versionchanged:: 0.12
       The filename is no longer automatically inferred from file objects. If
       you want to use automatic mimetype and etag support, pass a filepath via
       `filename_or_fp` or `attachment_filename`.

    .. versionchanged:: 0.12
       The `attachment_filename` is preferred over `filename` for MIME-type
       detection.

    .. versionchanged:: 1.0
        UTF-8 filenames, as specified in `RFC 2231`_, are supported.

    .. _RFC 2231: https://tools.ietf.org/html/rfc2231#section-4

    :param filename_or_fp: the filename of the file to send.
                           This is relative to the :attr:`~Flask.root_path`
                           if a relative path is specified.
                           Alternatively a file object might be provided in
                           which case ``X-Sendfile`` might not work and fall
                           back to the traditional method.  Make sure that the
                           file pointer is positioned at the start of data to
                           send before calling :func:`send_file`.
    :param mimetype: the mimetype of the file if provided. If a file path is
                     given, auto detection happens as fallback, otherwise an
                     error will be raised.
    :param as_attachment: set to ``True`` if you want to send this file with
                          a ``Content-Disposition: attachment`` header.
    :param attachment_filename: the filename for the attachment if it
                                differs from the file's filename.
    :param add_etags: set to ``False`` to disable attaching of etags.
    :param conditional: set to ``True`` to enable conditional responses.
    :param cache_timeout: the timeout in seconds for the headers. When ``None``
                          (default), this value is set by
                          :meth:`~Flask.get_send_file_max_age` of
                          :data:`~flask.current_app`.
    :param last_modified: set the ``Last-Modified`` header to this value,
        a :class:`~datetime.datetime` or timestamp.
        If a file was passed, this overrides its mtime.
    """
    mtime = None
    fsize = None
    # Split the input into a path (``filename``) and/or an open file
    # object (``file``); exactly one of the two drives each branch below.
    if isinstance(filename_or_fp, string_types):
        filename = filename_or_fp
        if not os.path.isabs(filename):
            filename = os.path.join(current_app.root_path, filename)
        file = None
        if attachment_filename is None:
            attachment_filename = os.path.basename(filename)
    else:
        file = filename_or_fp
        filename = None

    # Guess the mimetype from the attachment filename when possible;
    # with neither a mimetype nor any filename we cannot proceed.
    if mimetype is None:
        if attachment_filename is not None:
            mimetype = mimetypes.guess_type(attachment_filename)[0] \
                or 'application/octet-stream'

        if mimetype is None:
            raise ValueError(
                'Unable to infer MIME-type because no filename is available. '
                'Please set either `attachment_filename`, pass a filepath to '
                '`filename_or_fp` or set your own MIME-type via `mimetype`.'
            )

    headers = Headers()
    if as_attachment:
        if attachment_filename is None:
            raise TypeError('filename unavailable, required for '
                            'sending as attachment')

        # Latin-1-encodable names go straight into ``filename=``;
        # otherwise emit both a stripped fallback and an RFC 2231
        # ``filename*`` parameter with the UTF-8 name.
        try:
            attachment_filename = attachment_filename.encode('latin-1')
        except UnicodeEncodeError:
            filenames = {
                'filename': unicodedata.normalize(
                    'NFKD', attachment_filename).encode('latin-1', 'ignore'),
                'filename*': "UTF-8''%s" % url_quote(attachment_filename),
            }
        else:
            filenames = {'filename': attachment_filename}

        headers.add('Content-Disposition', 'attachment', **filenames)

    if current_app.use_x_sendfile and filename:
        # Let the webserver serve the file: close any file object we
        # were given and send only the header.
        if file is not None:
            file.close()

        headers['X-Sendfile'] = filename
        fsize = os.path.getsize(filename)
        headers['Content-Length'] = fsize
        data = None
    else:
        if file is None:
            file = open(filename, 'rb')
            mtime = os.path.getmtime(filename)
            fsize = os.path.getsize(filename)
            headers['Content-Length'] = fsize
        # Wrap with the WSGI server's file_wrapper when available.
        data = wrap_file(request.environ, file)

    rv = current_app.response_class(data, mimetype=mimetype, headers=headers,
                                    direct_passthrough=True)

    if last_modified is not None:
        rv.last_modified = last_modified
    elif mtime is not None:
        rv.last_modified = mtime

    rv.cache_control.public = True
    if cache_timeout is None:
        cache_timeout = current_app.get_send_file_max_age(filename)
    if cache_timeout is not None:
        rv.cache_control.max_age = cache_timeout
        rv.expires = int(time() + cache_timeout)

    if add_etags and filename is not None:
        from warnings import warn

        try:
            # ETag derived from mtime, size and a checksum of the path.
            rv.set_etag('%s-%s-%s' % (
                os.path.getmtime(filename),
                os.path.getsize(filename),
                adler32(
                    filename.encode('utf-8') if isinstance(filename, text_type)
                    else filename
                ) & 0xffffffff
            ))
        except OSError:
            warn('Access %s failed, maybe it does not exist, so ignore etags in '
                 'headers' % filename, stacklevel=2)

    if conditional:
        try:
            rv = rv.make_conditional(request, accept_ranges=True,
                                     complete_length=fsize)
        except RequestedRangeNotSatisfiable:
            if file is not None:
                file.close()
            raise
        # make sure we don't send x-sendfile for servers that
        # ignore the 304 status code for x-sendfile.
        if rv.status_code == 304:
            rv.headers.pop('x-sendfile', None)
    return rv
def safe_join(directory, *pathnames):
    """Safely join `directory` and zero or more untrusted `pathnames`
    components.

    Example usage::

        @app.route('/wiki/<path:filename>')
        def wiki_page(filename):
            filename = safe_join(app.config['WIKI_FOLDER'], filename)
            with open(filename, 'rb') as fd:
                content = fd.read()  # Read and process the file content...

    :param directory: the trusted base directory.
    :param pathnames: the untrusted pathnames relative to that directory.
    :raises: :class:`~werkzeug.exceptions.NotFound` if one or more passed
            paths fall out of its boundaries.
    """
    parts = [directory]
    for segment in pathnames:
        # Normalize non-empty segments so that traversal attempts are
        # reduced to a leading '..' we can detect below.
        if segment != '':
            segment = posixpath.normpath(segment)
        unsafe = (
            any(alt in segment for alt in _os_alt_seps)
            or os.path.isabs(segment)
            or segment == '..'
            or segment.startswith('../')
        )
        if unsafe:
            raise NotFound()
        parts.append(segment)
    return posixpath.join(*parts)
def send_from_directory(directory, filename, **options):
    """Send a file from a given directory with :func:`send_file`,
    validating the untrusted `filename` with :func:`safe_join` first.
    This is a secure way to quickly expose static files from an upload
    folder or something similar.

    Example usage::

        @app.route('/uploads/<path:filename>')
        def download_file(filename):
            return send_from_directory(app.config['UPLOAD_FOLDER'],
                                       filename, as_attachment=True)

    .. admonition:: Sending files and Performance

       It is strongly recommended to activate either ``X-Sendfile`` support in
       your webserver or (if no authentication happens) to tell the webserver
       to serve files for the given path on its own without calling into the
       web application for improved performance.

    .. versionadded:: 0.5

    :param directory: the directory where all the files are stored.
    :param filename: the filename relative to that directory to
                     download.
    :param options: optional keyword arguments that are directly
                    forwarded to :func:`send_file`.
    """
    path = safe_join(directory, filename)
    if not os.path.isabs(path):
        path = os.path.join(current_app.root_path, path)
    try:
        is_file = os.path.isfile(path)
    except (TypeError, ValueError):
        raise BadRequest()
    if not is_file:
        raise NotFound()
    options.setdefault('conditional', True)
    return send_file(path, **options)
def get_root_path(import_name):
    """Return the filesystem path of a package, or the cwd when that
    cannot be determined.  This returns the folder that contains the
    module/package; not to be confused with the package path returned
    by :func:`find_package`.
    """
    # An already-imported module with a __file__ attribute wins.
    module = sys.modules.get(import_name)
    if module is not None and hasattr(module, '__file__'):
        return os.path.dirname(os.path.abspath(module.__file__))

    # Next attempt: consult the import system's loader.
    loader = pkgutil.get_loader(import_name)

    # No loader, or an unloaded/interactive __main__ module: fall back
    # to the current working directory.
    if loader is None or import_name == '__main__':
        return os.getcwd()

    if hasattr(loader, 'get_filename'):
        file_path = loader.get_filename(import_name)
    else:
        # Some loaders (e.g. old zipimporter) lack get_filename;
        # import the module and read __file__ off it instead.
        __import__(import_name)
        file_path = getattr(sys.modules[import_name], '__file__', None)

    # Still no path: probably a namespace package or an import hook
    # that withholds file information.
    if file_path is None:
        raise RuntimeError('No root path can be found for the provided '
                           'module "%s". This can happen because the '
                           'module came from an import hook that does '
                           'not provide file name information or because '
                           'it\'s a namespace package. In this case '
                           'the root path needs to be explicitly '
                           'provided.' % import_name)

    # file_path is import_name.py for a module, or __init__.py for a package.
    return os.path.dirname(os.path.abspath(file_path))
def _matching_loader_thinks_module_is_package(loader, mod_name): | |
"""Given the loader that loaded a module and the module this function | |
attempts to figure out if the given module is actually a package. | |
""" | |
# If the loader can tell us if something is a package, we can | |
# directly ask the loader. | |
if hasattr(loader, 'is_package'): | |
return loader.is_package(mod_name) | |
# importlib's namespace loaders do not have this functionality but | |
# all the modules it loads are packages, so we can take advantage of | |
# this information. | |
elif (loader.__class__.__module__ == '_frozen_importlib' and | |
loader.__class__.__name__ == 'NamespaceLoader'): | |
return True | |
# Otherwise we need to fail with an error that explains what went | |
# wrong. | |
raise AttributeError( | |
('%s.is_package() method is missing but is required by Flask of ' | |
'PEP 302 import hooks. If you do not use import hooks and ' | |
'you encounter this error please file a bug against Flask.') % | |
loader.__class__.__name__) | |
def find_package(import_name):
    """Finds a package and returns the prefix (or None if the package is
    not installed) as well as the folder that contains the package or
    module as a tuple.  The package path returned is the module that would
    have to be added to the pythonpath in order to make it possible to
    import the module.  The prefix is the path below which a UNIX like
    folder structure exists (lib, share etc.).

    :param import_name: dotted import name; only its root component is
        used for the lookup.
    :return: ``(prefix_or_None, package_path)`` tuple.
    """
    # Only the top-level package determines the installation location.
    root_mod_name = import_name.split('.')[0]
    loader = pkgutil.get_loader(root_mod_name)
    if loader is None or import_name == '__main__':
        # import name is not found, or interactive/main module
        package_path = os.getcwd()
    else:
        # For .egg, zipimporter does not have get_filename until Python 2.7.
        if hasattr(loader, 'get_filename'):
            filename = loader.get_filename(root_mod_name)
        elif hasattr(loader, 'archive'):
            # zipimporter's loader.archive points to the .egg or .zip
            # archive filename is dropped in call to dirname below.
            filename = loader.archive
        else:
            # At least one loader is missing both get_filename and archive:
            # Google App Engine's HardenedModulesHook
            #
            # Fall back to imports.
            __import__(import_name)
            filename = sys.modules[import_name].__file__
        package_path = os.path.abspath(os.path.dirname(filename))

        # In case the root module is a package we need to chop off the
        # rightmost part.  This needs to go through a helper function
        # because of python 3.3 namespace packages.
        if _matching_loader_thinks_module_is_package(
                loader, root_mod_name):
            package_path = os.path.dirname(package_path)

    # Work out the installation prefix from the package location.
    site_parent, site_folder = os.path.split(package_path)
    py_prefix = os.path.abspath(sys.prefix)
    # Inside the interpreter prefix (system/virtualenv install).
    if package_path.startswith(py_prefix):
        return py_prefix, package_path
    elif site_folder.lower() == 'site-packages':
        parent, folder = os.path.split(site_parent)
        # Windows like installations
        if folder.lower() == 'lib':
            base_dir = parent
        # UNIX like installations
        elif os.path.basename(parent).lower() == 'lib':
            base_dir = os.path.dirname(parent)
        else:
            base_dir = site_parent
        return base_dir, package_path
    # Not installed under any recognizable prefix.
    return None, package_path
class locked_cached_property(object):
    """A decorator that converts a function into a lazy property.  The
    wrapped function is called once on first access and the computed
    result is reused on every later access.  Works like the Werkzeug
    equivalent but guards the computation with an
    :class:`~threading.RLock` for thread safety.
    """

    def __init__(self, func, name=None, doc=None):
        # Mirror the wrapped function's metadata so introspection and
        # help() keep working on the decorated attribute.
        self.__name__ = name or func.__name__
        self.__module__ = func.__module__
        self.__doc__ = doc or func.__doc__
        self.func = func
        self.lock = RLock()

    def __get__(self, obj, type=None):
        # Class-level access returns the descriptor itself.
        if obj is None:
            return self
        with self.lock:
            # The computed value is cached in the instance dict under
            # the property name; ``_missing`` marks "not computed yet".
            rv = obj.__dict__.get(self.__name__, _missing)
            if rv is _missing:
                rv = self.func(obj)
                obj.__dict__[self.__name__] = rv
            return rv
class _PackageBoundObject(object):
    """Base class for objects (applications and blueprints) that are
    bound to a Python package or module and resolve their resources
    (templates, static files) relative to that package on disk.
    """

    #: The name of the package or module that this app belongs to. Do not
    #: change this once it is set by the constructor.
    import_name = None

    #: Location of the template files to be added to the template lookup.
    #: ``None`` if templates should not be added.
    template_folder = None

    #: Absolute path to the package on the filesystem. Used to look up
    #: resources contained in the package.
    root_path = None

    def __init__(self, import_name, template_folder=None, root_path=None):
        """
        :param import_name: import name of the package or module this
            object belongs to.
        :param template_folder: folder containing templates, or ``None``.
        :param root_path: explicit filesystem root; derived from
            ``import_name`` when not given.
        """
        self.import_name = import_name
        self.template_folder = template_folder
        # Derive the filesystem location from the import name unless an
        # explicit root path was supplied.
        if root_path is None:
            root_path = get_root_path(self.import_name)
        self.root_path = root_path
        self._static_folder = None
        self._static_url_path = None

    def _get_static_folder(self):
        # Absolute path of the configured static folder, or ``None``
        # when no static folder was configured.
        if self._static_folder is not None:
            return os.path.join(self.root_path, self._static_folder)
    def _set_static_folder(self, value):
        self._static_folder = value
    static_folder = property(
        _get_static_folder, _set_static_folder,
        doc='The absolute path to the configured static folder.'
    )
    # Remove the accessors from the class namespace; from here on they
    # are only reachable through the property object.
    del _get_static_folder, _set_static_folder

    def _get_static_url_path(self):
        # An explicitly configured URL path wins; otherwise it is
        # derived from the static folder's basename (e.g. '/static').
        if self._static_url_path is not None:
            return self._static_url_path
        if self.static_folder is not None:
            return '/' + os.path.basename(self.static_folder)
    def _set_static_url_path(self, value):
        self._static_url_path = value
    static_url_path = property(
        _get_static_url_path, _set_static_url_path,
        doc='The URL prefix that the static route will be registered for.'
    )
    del _get_static_url_path, _set_static_url_path

    @property
    def has_static_folder(self):
        """This is ``True`` if the package bound object's container has a
        folder for static files.
        .. versionadded:: 0.5
        """
        return self.static_folder is not None

    @locked_cached_property
    def jinja_loader(self):
        """The Jinja loader for this package bound object.
        .. versionadded:: 0.5
        """
        # ``None`` when no template folder is configured.
        if self.template_folder is not None:
            return FileSystemLoader(os.path.join(self.root_path,
                                                 self.template_folder))

    def get_send_file_max_age(self, filename):
        """Provides default cache_timeout for the :func:`send_file` functions.
        By default, this function returns ``SEND_FILE_MAX_AGE_DEFAULT`` from
        the configuration of :data:`~flask.current_app`.
        Static file functions such as :func:`send_from_directory` use this
        function, and :func:`send_file` calls this function on
        :data:`~flask.current_app` when the given cache_timeout is ``None``. If a
        cache_timeout is given in :func:`send_file`, that timeout is used;
        otherwise, this method is called.
        This allows subclasses to change the behavior when sending files based
        on the filename.  For example, to set the cache timeout for .js files
        to 60 seconds::
            class MyFlask(flask.Flask):
                def get_send_file_max_age(self, name):
                    if name.lower().endswith('.js'):
                        return 60
                    return flask.Flask.get_send_file_max_age(self, name)
        .. versionadded:: 0.9
        """
        # The config value is a timedelta; callers expect whole seconds.
        return total_seconds(current_app.send_file_max_age_default)

    def send_static_file(self, filename):
        """Function used internally to send static files from the static
        folder to the browser.
        .. versionadded:: 0.5
        """
        if not self.has_static_folder:
            raise RuntimeError('No static folder for this object')
        # Ensure get_send_file_max_age is called in all cases.
        # Here, we ensure get_send_file_max_age is called for Blueprints.
        cache_timeout = self.get_send_file_max_age(filename)
        return send_from_directory(self.static_folder, filename,
                                   cache_timeout=cache_timeout)

    def open_resource(self, resource, mode='rb'):
        """Opens a resource from the application's resource folder.  To see
        how this works, consider the following folder structure::
            /myapplication.py
            /schema.sql
            /static
                /style.css
            /templates
                /layout.html
                /index.html
        If you want to open the :file:`schema.sql` file you would do the
        following::
            with app.open_resource('schema.sql') as f:
                contents = f.read()
                do_something_with(contents)
        :param resource: the name of the resource.  To access resources within
            subfolders use forward slashes as separator.
        :param mode: resource file opening mode, default is 'rb'.
        """
        # Writing would modify the package contents; only reads allowed.
        if mode not in ('r', 'rb'):
            raise ValueError('Resources can only be opened for reading')
        return open(os.path.join(self.root_path, resource), mode)
def total_seconds(td):
    """Returns the total seconds from a timedelta object, truncating any
    fractional (microsecond) part.
    :param timedelta td: the timedelta to be converted in seconds
    :returns: number of seconds
    :rtype: int
    """
    return td.seconds + 24 * 60 * 60 * td.days
def is_ip(value):
    """Determine if the given string is an IP address.
    Python 2 on Windows doesn't provide ``inet_pton``, so this only
    checks IPv4 addresses in that environment.
    :param value: value to check
    :type value: str
    :return: True if string is an IP address
    :rtype: bool
    """
    # inet_pton is unavailable on Python 2 / Windows; degrade to an
    # IPv4-only check there.
    if PY2 and os.name == 'nt':
        try:
            socket.inet_aton(value)
        except socket.error:
            return False
        return True

    for family in (socket.AF_INET, socket.AF_INET6):
        try:
            socket.inet_pton(family, value)
        except socket.error:
            continue
        return True

    return False
# -*- coding: utf-8 -*- | |
""" | |
flask.json | |
~~~~~~~~~~ | |
:copyright: © 2010 by the Pallets team. | |
:license: BSD, see LICENSE for more details. | |
""" | |
import codecs | |
import io | |
import uuid | |
from datetime import date, datetime | |
from flask.globals import current_app, request | |
from flask._compat import text_type, PY2 | |
from werkzeug.http import http_date | |
from jinja2 import Markup | |
# Use the same json implementation as itsdangerous on which we | |
# depend anyways. | |
from itsdangerous import json as _json | |
# Figure out if simplejson escapes slashes.  This behavior was changed
# from one version to another without reason.  ``_slash_escape`` ends up
# True when dumps('/') leaves the slash unescaped; when it is False,
# htmlsafe_dumps strips the redundant "\/" escaping back out.
_slash_escape = '\\/' not in _json.dumps('/')

# Public API of this module.
__all__ = ['dump', 'dumps', 'load', 'loads', 'htmlsafe_dump',
           'htmlsafe_dumps', 'JSONDecoder', 'JSONEncoder',
           'jsonify']
def _wrap_reader_for_text(fp, encoding): | |
if isinstance(fp.read(0), bytes): | |
fp = io.TextIOWrapper(io.BufferedReader(fp), encoding) | |
return fp | |
def _wrap_writer_for_text(fp, encoding): | |
try: | |
fp.write('') | |
except TypeError: | |
fp = io.TextIOWrapper(fp, encoding) | |
return fp | |
class JSONEncoder(_json.JSONEncoder):
    """The default Flask JSON encoder.  Extends the default simplejson
    encoder with support for ``datetime`` and ``date`` objects (serialized
    as RFC 822 / HTTP date strings), ``UUID`` objects and ``Markup``-like
    objects exposing ``__html__``.  Override :meth:`default` to support
    more data types.
    """

    def default(self, o):
        """Implement this method in a subclass such that it returns a
        serializable object for ``o``, or calls the base implementation (to
        raise a :exc:`TypeError`).
        For example, to support arbitrary iterators, you could implement
        default like this::
            def default(self, o):
                try:
                    iterable = iter(o)
                except TypeError:
                    pass
                else:
                    return list(iterable)
                return JSONEncoder.default(self, o)
        """
        # ``datetime`` must be checked before ``date`` since it is a
        # subclass of it.
        if isinstance(o, datetime):
            return http_date(o.utctimetuple())
        elif isinstance(o, date):
            return http_date(o.timetuple())
        elif isinstance(o, uuid.UUID):
            return str(o)
        elif hasattr(o, '__html__'):
            return text_type(o.__html__())
        # Fall back to the base class, which raises TypeError.
        return _json.JSONEncoder.default(self, o)
class JSONDecoder(_json.JSONDecoder):
    """The default JSON decoder.  This one does not change the behavior
    from the default simplejson decoder.  Consult the :mod:`json`
    documentation for more information.  This decoder is not only used
    for the load functions of this module but also for
    :attr:`~flask.Request`.
    """
def _dump_arg_defaults(kwargs):
    """Inject default arguments for dump functions."""
    if current_app:
        # Prefer a blueprint-specific encoder when the current request
        # was dispatched to a blueprint that configured one.
        blueprint = None
        if request:
            blueprint = current_app.blueprints.get(request.blueprint)
        if blueprint and blueprint.json_encoder:
            encoder = blueprint.json_encoder
        else:
            encoder = current_app.json_encoder
        kwargs.setdefault('cls', encoder)
        if not current_app.config['JSON_AS_ASCII']:
            kwargs.setdefault('ensure_ascii', False)
        kwargs.setdefault('sort_keys', current_app.config['JSON_SORT_KEYS'])
    else:
        # No application context: fall back to module-level defaults.
        kwargs.setdefault('sort_keys', True)
        kwargs.setdefault('cls', JSONEncoder)
def _load_arg_defaults(kwargs):
    """Inject default arguments for load functions."""
    if not current_app:
        # No application context: fall back to the module default.
        kwargs.setdefault('cls', JSONDecoder)
        return
    # Prefer a blueprint-specific decoder when the current request was
    # dispatched to a blueprint that configured one.
    blueprint = None
    if request:
        blueprint = current_app.blueprints.get(request.blueprint)
    if blueprint and blueprint.json_decoder:
        decoder = blueprint.json_decoder
    else:
        decoder = current_app.json_decoder
    kwargs.setdefault('cls', decoder)
def detect_encoding(data):
    """Detect which UTF codec was used to encode the given bytes.
    The latest JSON standard (:rfc:`8259`) suggests that only UTF-8 is
    accepted.  Older documents allowed 8, 16, or 32.  16 and 32 can be big
    or little endian.  Some editors or libraries may prepend a BOM.
    :param data: Bytes in unknown UTF encoding.
    :return: UTF encoding name
    """
    sample = data[:4]

    # An explicit UTF-8 BOM wins outright.
    if sample[:3] == codecs.BOM_UTF8:
        return 'utf-8-sig'

    # UTF-8 text whose first characters are ASCII (as JSON's are)
    # contains no NUL bytes in its first four bytes.
    if b'\x00' not in sample:
        return 'utf-8'

    # Byte order marks for the wider encodings.
    if sample in (codecs.BOM_UTF32_BE, codecs.BOM_UTF32_LE):
        return 'utf-32'
    if sample[:2] in (codecs.BOM_UTF16_BE, codecs.BOM_UTF16_LE):
        return 'utf-16'

    # No BOM: infer width and endianness from where the NUL bytes sit
    # around the first (ASCII) character.
    if len(sample) == 4:
        if sample[:3] == b'\x00\x00\x00':
            return 'utf-32-be'
        if sample[::2] == b'\x00\x00':
            return 'utf-16-be'
        if sample[1:] == b'\x00\x00\x00':
            return 'utf-32-le'
        if sample[1::2] == b'\x00\x00':
            return 'utf-16-le'
    if len(sample) == 2:
        return 'utf-16-be' if sample.startswith(b'\x00') else 'utf-16-le'
    return 'utf-8'
def dumps(obj, **kwargs):
    """Serialize ``obj`` to a JSON formatted ``str`` by using the
    application's configured encoder (:attr:`~flask.Flask.json_encoder`)
    if there is an application on the stack.
    This function can return ``unicode`` strings or ascii-only
    bytestrings by default which coerce into unicode strings
    automatically.  That behavior by default is controlled by the
    ``JSON_AS_ASCII`` configuration variable and can be overridden by the
    simplejson ``ensure_ascii`` parameter.
    """
    _dump_arg_defaults(kwargs)
    encoding = kwargs.pop('encoding', None)
    result = _json.dumps(obj, **kwargs)

    # An explicit ``encoding`` turns a unicode result into bytes.
    if encoding is not None and isinstance(result, text_type):
        result = result.encode(encoding)

    return result
def dump(obj, fp, **kwargs):
    """Like :func:`dumps` but writes into a file object."""
    _dump_arg_defaults(kwargs)
    encoding = kwargs.pop('encoding', None)

    # When an encoding is requested, make sure the target accepts text.
    if encoding is not None:
        fp = _wrap_writer_for_text(fp, encoding)

    _json.dump(obj, fp, **kwargs)
def loads(s, **kwargs):
    """Unserialize a JSON object from a string ``s`` by using the
    application's configured decoder (:attr:`~flask.Flask.json_decoder`)
    if there is an application on the stack.
    """
    _load_arg_defaults(kwargs)
    if isinstance(s, bytes):
        # Decode the payload first: honor an explicit ``encoding``
        # keyword, otherwise sniff the UTF flavor from the bytes.
        encoding = kwargs.pop('encoding', None)
        if encoding is None:
            encoding = detect_encoding(s)
        s = s.decode(encoding)
    return _json.loads(s, **kwargs)
def load(fp, **kwargs):
    """Like :func:`loads` but reads from a file object.
    """
    _load_arg_defaults(kwargs)
    if not PY2:
        # On Python 3 the json module requires a text stream.
        fp = _wrap_reader_for_text(fp, kwargs.pop('encoding', None) or 'utf-8')
    return _json.load(fp, **kwargs)
def htmlsafe_dumps(obj, **kwargs):
    """Works exactly like :func:`dumps` but is safe for use in ``<script>``
    tags.  It accepts the same arguments and returns a JSON string.  Note
    that this is available in templates through the ``|tojson`` filter
    which will also mark the result as safe.  Due to how this function
    escapes certain characters this is safe even if used outside of
    ``<script>`` tags.
    The following characters are escaped in strings:
    - ``<``
    - ``>``
    - ``&``
    - ``'``
    This makes it safe to embed such strings in any place in HTML with the
    notable exception of double quoted attributes.  In that case single
    quote your attributes or HTML escape it in addition.
    .. versionchanged:: 0.10
       This function's return value is now always safe for HTML usage, even
       if outside of script tags or if used in XHTML.  This rule does not
       hold true when using this function in HTML attributes that are double
       quoted.  Always single quote attributes if you use the ``|tojson``
       filter.  Alternatively use ``|tojson|forceescape``.
    """
    rv = dumps(obj, **kwargs)
    # Escape the characters that could break out of a <script> block or
    # HTML context, using \uXXXX escapes that stay valid JSON.
    for ch, escape in ((u'<', u'\\u003c'), (u'>', u'\\u003e'),
                       (u'&', u'\\u0026'), (u"'", u'\\u0027')):
        rv = rv.replace(ch, escape)
    # Undo gratuitous slash escaping done by some json versions.
    if not _slash_escape:
        rv = rv.replace('\\/', '/')
    return rv
def htmlsafe_dump(obj, fp, **kwargs):
    """Like :func:`htmlsafe_dumps` but writes into a file object."""
    payload = htmlsafe_dumps(obj, **kwargs)
    fp.write(text_type(payload))
def jsonify(*args, **kwargs):
    """This function wraps :func:`dumps` to add a few enhancements that make
    life easier.  It turns the JSON output into a :class:`~flask.Response`
    object with the :mimetype:`application/json` mimetype.  For
    convenience, it also converts multiple arguments into an array or
    multiple keyword arguments into a dict.  This means that both
    ``jsonify(1,2,3)`` and ``jsonify([1,2,3])`` serialize to ``[1,2,3]``.
    For clarity, the JSON serialization behavior has the following
    differences from :func:`dumps`:
    1. Single argument: Passed straight through to :func:`dumps`.
    2. Multiple arguments: Converted to an array before being passed to
       :func:`dumps`.
    3. Multiple keyword arguments: Converted to a dict before being passed
       to :func:`dumps`.
    4. Both args and kwargs: Behavior undefined and will throw an exception.
    Example usage::
        from flask import jsonify
        @app.route('/_get_current_user')
        def get_current_user():
            return jsonify(username=g.user.username,
                           email=g.user.email,
                           id=g.user.id)
    This will send a JSON response like this to the browser::
        {
            "username": "admin",
            "email": "admin@localhost",
            "id": 42
        }
    .. versionchanged:: 0.11
       Added support for serializing top-level arrays.  This introduces a
       security risk in ancient browsers.  See :ref:`json-security` for
       details.
    This function's response will be pretty printed if the
    ``JSONIFY_PRETTYPRINT_REGULAR`` config parameter is set to True or the
    Flask app is running in debug mode.  Compressed (not pretty) formatting
    currently means no indents and no spaces after separators.
    .. versionadded:: 0.2
    """
    # Pretty-print when explicitly configured or in debug mode.
    pretty = (
        current_app.config['JSONIFY_PRETTYPRINT_REGULAR']
        or current_app.debug
    )
    if pretty:
        indent, separators = 2, (', ', ': ')
    else:
        indent, separators = None, (',', ':')

    if args and kwargs:
        raise TypeError(
            'jsonify() behavior undefined when passed both args and kwargs'
        )
    if len(args) == 1:
        # A single positional argument is serialized as-is.
        data = args[0]
    else:
        data = args or kwargs

    body = dumps(data, indent=indent, separators=separators) + '\n'
    return current_app.response_class(
        body, mimetype=current_app.config['JSONIFY_MIMETYPE']
    )
def tojson_filter(obj, **kwargs):
    # Jinja ``|tojson`` filter: HTML-safe JSON wrapped in Markup so the
    # template engine does not escape it again.
    return Markup(htmlsafe_dumps(obj, **kwargs))
# -*- coding: utf-8 -*- | |
""" | |
Tagged JSON | |
~~~~~~~~~~~ | |
A compact representation for lossless serialization of non-standard JSON types. | |
:class:`~flask.sessions.SecureCookieSessionInterface` uses this to serialize | |
the session data, but it may be useful in other places. It can be extended to | |
support other types. | |
.. autoclass:: TaggedJSONSerializer | |
:members: | |
.. autoclass:: JSONTag | |
:members: | |
Let's see an example that adds support for :class:`~collections.OrderedDict`. | |
Dicts don't have an order in Python or JSON, so to handle this we will dump | |
the items as a list of ``[key, value]`` pairs. Subclass :class:`JSONTag` and | |
give it the new key ``' od'`` to identify the type. The session serializer | |
processes dicts first, so insert the new tag at the front of the order since | |
``OrderedDict`` must be processed before ``dict``. :: | |
from flask.json.tag import JSONTag | |
class TagOrderedDict(JSONTag): | |
__slots__ = ('serializer',) | |
key = ' od' | |
def check(self, value): | |
return isinstance(value, OrderedDict) | |
def to_json(self, value): | |
return [[k, self.serializer.tag(v)] for k, v in iteritems(value)] | |
def to_python(self, value): | |
return OrderedDict(value) | |
app.session_interface.serializer.register(TagOrderedDict, index=0) | |
:copyright: © 2010 by the Pallets team. | |
:license: BSD, see LICENSE for more details. | |
""" | |
from base64 import b64decode, b64encode | |
from datetime import datetime | |
from uuid import UUID | |
from jinja2 import Markup | |
from werkzeug.http import http_date, parse_date | |
from flask._compat import iteritems, text_type | |
from flask.json import dumps, loads | |
class JSONTag(object):
    """Base class for defining type tags for :class:`TaggedJSONSerializer`."""

    __slots__ = ('serializer',)

    #: The tag to mark the serialized object with. If ``None``, this tag is
    #: only used as an intermediate step during tagging.
    key = None

    def __init__(self, serializer):
        """Create a tagger for the given serializer."""
        self.serializer = serializer

    def check(self, value):
        """Check if the given value should be tagged by this tag."""
        raise NotImplementedError

    def to_json(self, value):
        """Convert the Python object to an object that is a valid JSON
        type.  The tag will be added later."""
        raise NotImplementedError

    def to_python(self, value):
        """Convert the JSON representation back to the correct type.  The
        tag will already be removed."""
        raise NotImplementedError

    def tag(self, value):
        """Wrap the converted value in the ``{key: payload}`` tag
        structure."""
        payload = self.to_json(value)
        return {self.key: payload}
class TagDict(JSONTag):
    """Tag for 1-item dicts whose only key matches a registered tag.
    Internally, the dict key is suffixed with `__`, and the suffix is
    removed when deserializing.
    """

    __slots__ = ()
    key = ' di'

    def check(self, value):
        # Only a one-item dict whose single key collides with a
        # registered tag key needs this disambiguation.
        if not isinstance(value, dict) or len(value) != 1:
            return False
        return next(iter(value)) in self.serializer.tags

    def to_json(self, value):
        only_key = next(iter(value))
        return {only_key + '__': self.serializer.tag(value[only_key])}

    def to_python(self, value):
        only_key = next(iter(value))
        # Strip the two-character '__' suffix added by to_json.
        return {only_key[:-2]: value[only_key]}
class PassDict(JSONTag):
    """Recursively tag the values of a dict; the dict itself is a native
    JSON type and carries no tag (``key`` stays ``None``)."""

    __slots__ = ()

    def check(self, value):
        return isinstance(value, dict)

    def to_json(self, value):
        # JSON objects may only have string keys, so don't bother tagging
        # the key here.
        return {k: self.serializer.tag(v) for k, v in iteritems(value)}

    # Untagged containers are tagged in place.
    tag = to_json
class TagTuple(JSONTag):
    """Represent tuples as tagged JSON arrays so they round-trip back to
    tuples instead of lists."""

    __slots__ = ()
    key = ' t'

    def check(self, value):
        return isinstance(value, tuple)

    def to_json(self, value):
        return list(map(self.serializer.tag, value))

    def to_python(self, value):
        return tuple(value)
class PassList(JSONTag):
    """Recursively tag the items of a list; the list itself is a native
    JSON type and carries no tag (``key`` stays ``None``)."""

    __slots__ = ()

    def check(self, value):
        return isinstance(value, list)

    def to_json(self, value):
        return [self.serializer.tag(entry) for entry in value]

    # Untagged containers are tagged in place.
    tag = to_json
class TagBytes(JSONTag):
    """Serialize ``bytes`` values as base64-encoded ASCII text."""

    __slots__ = ()
    key = ' b'

    def check(self, value):
        return isinstance(value, bytes)

    def to_json(self, value):
        encoded = b64encode(value)
        return encoded.decode('ascii')

    def to_python(self, value):
        return b64decode(value)
class TagMarkup(JSONTag):
    """Serialize anything matching the :class:`~flask.Markup` API by
    having a ``__html__`` method to the result of that method.  Always
    deserializes to an instance of :class:`~flask.Markup`."""

    __slots__ = ()
    key = ' m'

    def check(self, value):
        # Anything with a callable __html__ duck-types as Markup.
        return callable(getattr(value, '__html__', None))

    def to_json(self, value):
        return text_type(value.__html__())

    def to_python(self, value):
        return Markup(value)
class TagUUID(JSONTag):
    """Serialize :class:`~uuid.UUID` values as their 32-character hex
    form."""

    __slots__ = ()
    key = ' u'

    def check(self, value):
        return isinstance(value, UUID)

    def to_json(self, value):
        return value.hex

    def to_python(self, value):
        return UUID(value)
class TagDateTime(JSONTag):
    """Serialize :class:`~datetime.datetime` values as HTTP date strings
    and parse them back with werkzeug."""

    __slots__ = ()
    key = ' d'

    def check(self, value):
        return isinstance(value, datetime)

    def to_json(self, value):
        return http_date(value)

    def to_python(self, value):
        return parse_date(value)
class TaggedJSONSerializer(object):
    """Serializer that uses a tag system to compactly represent objects that
    are not JSON types. Passed as the intermediate serializer to
    :class:`itsdangerous.Serializer`.
    The following extra types are supported:
    * :class:`dict`
    * :class:`tuple`
    * :class:`bytes`
    * :class:`~flask.Markup`
    * :class:`~uuid.UUID`
    * :class:`~datetime.datetime`
    """

    __slots__ = ('tags', 'order')

    #: Tag classes to bind when creating the serializer. Other tags can be
    #: added later using :meth:`~register`.
    default_tags = [
        TagDict, PassDict, TagTuple, PassList, TagBytes, TagMarkup, TagUUID,
        TagDateTime,
    ]

    def __init__(self):
        # Maps tag key (e.g. ' t') to the tag instance, used by untag().
        self.tags = {}
        # Tag instances in the order they are tried during tagging.
        self.order = []
        for cls in self.default_tags:
            self.register(cls)

    def register(self, tag_class, force=False, index=None):
        """Register a new tag with this serializer.
        :param tag_class: tag class to register. Will be instantiated with this
            serializer instance.
        :param force: overwrite an existing tag. If false (default), a
            :exc:`KeyError` is raised.
        :param index: index to insert the new tag in the tag order. Useful when
            the new tag is a special case of an existing tag. If ``None``
            (default), the tag is appended to the end of the order.
        :raise KeyError: if the tag key is already registered and ``force`` is
            not true.
        """
        tag = tag_class(self)
        key = tag.key
        # Keyless tags (PassDict, PassList) participate in tagging only
        # and are never looked up during untagging.
        if key is not None:
            if not force and key in self.tags:
                raise KeyError("Tag '{0}' is already registered.".format(key))
            self.tags[key] = tag
        if index is None:
            self.order.append(tag)
        else:
            self.order.insert(index, tag)

    def tag(self, value):
        """Convert a value to a tagged representation if necessary."""
        # The first matching tag wins, so registration order matters.
        for tag in self.order:
            if tag.check(value):
                return tag.tag(value)
        return value

    def untag(self, value):
        """Convert a tagged representation back to the original type."""
        # Tagged values are exactly one-item dicts whose single key is a
        # registered tag key; anything else passes through unchanged.
        if len(value) != 1:
            return value
        key = next(iter(value))
        if key not in self.tags:
            return value
        return self.tags[key].to_python(value[key])

    def dumps(self, value):
        """Tag the value and dump it to a compact JSON string."""
        return dumps(self.tag(value), separators=(',', ':'))

    def loads(self, value):
        """Load data from a JSON string and deserialized any tagged objects."""
        return loads(value, object_hook=self.untag)
# -*- coding: utf-8 -*- | |
""" | |
flask.logging | |
~~~~~~~~~~~~~ | |
:copyright: © 2010 by the Pallets team. | |
:license: BSD, see LICENSE for more details. | |
""" | |
from __future__ import absolute_import | |
import logging | |
import sys | |
from werkzeug.local import LocalProxy | |
from .globals import request | |
@LocalProxy
def wsgi_errors_stream():
    """Find the most appropriate error stream for the application.  If a
    request is active, log to ``wsgi.errors``, otherwise use
    ``sys.stderr``.
    If you configure your own :class:`logging.StreamHandler`, you may want
    to use this for the stream.  If you are using file or dict
    configuration and can't import this directly, you can refer to it as
    ``ext://flask.logging.wsgi_errors_stream``.
    """
    if request:
        return request.environ['wsgi.errors']
    return sys.stderr
def has_level_handler(logger):
    """Check if there is a handler in the logging chain that will handle
    the given logger's :meth:`effective level
    <~logging.Logger.getEffectiveLevel>`.
    """
    level = logger.getEffectiveLevel()
    node = logger

    # Walk up the logger hierarchy the same way the logging module
    # propagates records.
    while node:
        for handler in node.handlers:
            if handler.level <= level:
                return True
        if not node.propagate:
            break
        node = node.parent

    return False
#: Log messages to :func:`~flask.logging.wsgi_errors_stream` with the format
#: ``[%(asctime)s] %(levelname)s in %(module)s: %(message)s``.
#: The stream argument is a ``LocalProxy``, so the actual target stream
#: is presumably resolved per write — TODO confirm against werkzeug's
#: LocalProxy semantics.
default_handler = logging.StreamHandler(wsgi_errors_stream)
default_handler.setFormatter(logging.Formatter(
    '[%(asctime)s] %(levelname)s in %(module)s: %(message)s'
))
def create_logger(app):
    """Get the ``'flask.app'`` logger and configure it if needed.
    When :attr:`~flask.Flask.debug` is enabled, set the logger level to
    :data:`logging.DEBUG` if it is not set.
    If there is no handler for the logger's effective level, add a
    :class:`~logging.StreamHandler` for
    :func:`~flask.logging.wsgi_errors_stream` with a basic format.
    """
    logger = logging.getLogger('flask.app')

    # Only raise verbosity in debug mode when the user has not picked a
    # level explicitly.
    if app.debug and logger.level == logging.NOTSET:
        logger.setLevel(logging.DEBUG)

    # Guarantee at least one effective handler without clobbering any
    # handlers the user installed.
    if not has_level_handler(logger):
        logger.addHandler(default_handler)

    return logger
# -*- coding: utf-8 -*- | |
""" | |
flask.sessions | |
~~~~~~~~~~~~~~ | |
Implements cookie based sessions based on itsdangerous. | |
:copyright: © 2010 by the Pallets team. | |
:license: BSD, see LICENSE for more details. | |
""" | |
import hashlib | |
import warnings | |
from collections import MutableMapping | |
from datetime import datetime | |
from itsdangerous import BadSignature, URLSafeTimedSerializer | |
from werkzeug.datastructures import CallbackDict | |
from flask.helpers import is_ip, total_seconds | |
from flask.json.tag import TaggedJSONSerializer | |
class SessionMixin(MutableMapping):
    """Expands a basic dictionary with session attributes."""

    @property
    def permanent(self):
        """This reflects the ``'_permanent'`` key in the dict."""
        return self.get('_permanent', False)

    @permanent.setter
    def permanent(self, value):
        # Stored in the dict itself so it survives serialization.
        self['_permanent'] = bool(value)

    #: Some implementations can detect whether a session is newly
    #: created, but that is not guaranteed. Use with caution. The mixin
    #: default is hard-coded ``False``.
    new = False

    #: Some implementations can detect changes to the session and set
    #: this when that happens. The mixin default is hard coded to
    #: ``True``.
    modified = True

    #: Some implementations can detect when session data is read or
    #: written and set this when that happens. The mixin default is hard
    #: coded to ``True``.
    accessed = True
class SecureCookieSession(CallbackDict, SessionMixin):
    """Base class for sessions based on signed cookies.
    This session backend will set the :attr:`modified` and
    :attr:`accessed` attributes. It cannot reliably track whether a
    session is new (vs. empty), so :attr:`new` remains hard coded to
    ``False``.
    """

    #: When data is changed, this is set to ``True``. Only the session
    #: dictionary itself is tracked; if the session contains mutable
    #: data (for example a nested dict) then this must be set to
    #: ``True`` manually when modifying that data. The session cookie
    #: will only be written to the response if this is ``True``.
    modified = False

    #: When data is read or written, this is set to ``True``. Used by
    #: :class:`.SecureCookieSessionInterface` to add a ``Vary: Cookie``
    #: header, which allows caching proxies to cache different pages for
    #: different users.
    accessed = False

    def __init__(self, initial=None):
        # CallbackDict invokes ``on_update`` on every mutation; mark the
        # session both modified and accessed then.
        def on_update(self):
            self.modified = True
            self.accessed = True
        super(SecureCookieSession, self).__init__(initial, on_update)

    def __getitem__(self, key):
        # Reads also count as access (drives the Vary: Cookie header).
        self.accessed = True
        return super(SecureCookieSession, self).__getitem__(key)

    def get(self, key, default=None):
        self.accessed = True
        return super(SecureCookieSession, self).get(key, default)

    def setdefault(self, key, default=None):
        self.accessed = True
        return super(SecureCookieSession, self).setdefault(key, default)
class NullSession(SecureCookieSession):
    """Class used to generate nicer error messages if sessions are not
    available.  Will still allow read-only access to the empty session
    but fail on setting.
    """

    def _fail(self, *args, **kwargs):
        raise RuntimeError('The session is unavailable because no secret '
                           'key was set. Set the secret_key on the '
                           'application to something unique and secret.')

    # Every mutating dict method raises the helpful error above; reads
    # fall through to the (empty) SecureCookieSession behavior.
    __setitem__ = _fail
    __delitem__ = _fail
    clear = _fail
    pop = _fail
    popitem = _fail
    update = _fail
    setdefault = _fail
    del _fail
class SessionInterface(object):
    """The basic interface you have to implement in order to replace the
    default session interface which uses werkzeug's securecookie
    implementation. The only methods you have to implement are
    :meth:`open_session` and :meth:`save_session`, the others have
    useful defaults which you don't need to change.
    The session object returned by the :meth:`open_session` method has to
    provide a dictionary like interface plus the properties and methods
    from the :class:`SessionMixin`. We recommend just subclassing a dict
    and adding that mixin::
        class Session(dict, SessionMixin):
            pass
    If :meth:`open_session` returns ``None`` Flask will call into
    :meth:`make_null_session` to create a session that acts as replacement
    if the session support cannot work because some requirement is not
    fulfilled. The default :class:`NullSession` class that is created
    will complain that the secret key was not set.
    To replace the session interface on an application all you have to do
    is to assign :attr:`flask.Flask.session_interface`::
        app = Flask(__name__)
        app.session_interface = MySessionInterface()
    .. versionadded:: 0.8
    """

    #: :meth:`make_null_session` will look here for the class that should
    #: be created when a null session is requested. Likewise the
    #: :meth:`is_null_session` method will perform a typecheck against
    #: this type.
    null_session_class = NullSession

    #: A flag that indicates if the session interface is pickle based.
    #: This can be used by Flask extensions to make a decision in regards
    #: to how to deal with the session object.
    #:
    #: .. versionadded:: 0.10
    pickle_based = False

    def make_null_session(self, app):
        """Creates a null session which acts as a replacement object if the
        real session support could not be loaded due to a configuration
        error. This mainly aids the user experience because the job of the
        null session is to still support lookup without complaining but
        modifications are answered with a helpful error message of what
        failed.
        This creates an instance of :attr:`null_session_class` by default.
        """
        # ``app`` is unused by the default implementation but kept for
        # subclasses that need application state.
        return self.null_session_class()

    def is_null_session(self, obj):
        """Checks if a given object is a null session. Null sessions are
        not asked to be saved.
        This checks if the object is an instance of
        :attr:`null_session_class` by default.
        """
        return isinstance(obj, self.null_session_class)
def get_cookie_domain(self, app): | |
"""Returns the domain that should be set for the session cookie. | |
Uses ``SESSION_COOKIE_DOMAIN`` if it is configured, otherwise | |
falls back to detecting the domain based on ``SERVER_NAME``. | |
Once detected (or if not set at all), ``SESSION_COOKIE_DOMAIN`` is | |
updated to avoid re-running the logic. | |
""" | |
rv = app.config['SESSION_COOKIE_DOMAIN'] | |
# set explicitly, or cached from SERVER_NAME detection | |
# if False, return None | |
if rv is not None: | |
return rv if rv else None | |
rv = app.config['SERVER_NAME'] | |
# server name not set, cache False to return none next time | |
if not rv: | |
app.config['SESSION_COOKIE_DOMAIN'] = False | |
return None | |
# chop off the port which is usually not supported by browsers | |
# remove any leading '.' since we'll add that later | |
rv = rv.rsplit(':', 1)[0].lstrip('.') | |
if '.' not in rv: | |
# Chrome doesn't allow names without a '.' | |
# this should only come up with localhost | |
# hack around this by not setting the name, and show a warning | |
warnings.warn( | |
'"{rv}" is not a valid cookie domain, it must contain a ".".' | |
' Add an entry to your hosts file, for example' | |
' "{rv}.localdomain", and use that instead.'.format(rv=rv) | |
) | |
app.config['SESSION_COOKIE_DOMAIN'] = False | |
return None | |
ip = is_ip(rv) | |
if ip: | |
warnings.warn( | |
'The session cookie domain is an IP address. This may not work' | |
' as intended in some browsers. Add an entry to your hosts' | |
' file, for example "localhost.localdomain", and use that' | |
' instead.' | |
) | |
# if this is not an ip and app is mounted at the root, allow subdomain | |
# matching by adding a '.' prefix | |
if self.get_cookie_path(app) == '/' and not ip: | |
rv = '.' + rv | |
app.config['SESSION_COOKIE_DOMAIN'] = rv | |
return rv | |
def get_cookie_path(self, app): | |
"""Returns the path for which the cookie should be valid. The | |
default implementation uses the value from the ``SESSION_COOKIE_PATH`` | |
config var if it's set, and falls back to ``APPLICATION_ROOT`` or | |
uses ``/`` if it's ``None``. | |
""" | |
return app.config['SESSION_COOKIE_PATH'] \ | |
or app.config['APPLICATION_ROOT'] | |
def get_cookie_httponly(self, app): | |
"""Returns True if the session cookie should be httponly. This | |
currently just returns the value of the ``SESSION_COOKIE_HTTPONLY`` | |
config var. | |
""" | |
return app.config['SESSION_COOKIE_HTTPONLY'] | |
def get_cookie_secure(self, app): | |
"""Returns True if the cookie should be secure. This currently | |
just returns the value of the ``SESSION_COOKIE_SECURE`` setting. | |
""" | |
return app.config['SESSION_COOKIE_SECURE'] | |
def get_cookie_samesite(self, app): | |
"""Return ``'Strict'`` or ``'Lax'`` if the cookie should use the | |
``SameSite`` attribute. This currently just returns the value of | |
the :data:`SESSION_COOKIE_SAMESITE` setting. | |
""" | |
return app.config['SESSION_COOKIE_SAMESITE'] | |
def get_expiration_time(self, app, session): | |
"""A helper method that returns an expiration date for the session | |
or ``None`` if the session is linked to the browser session. The | |
default implementation returns now + the permanent session | |
lifetime configured on the application. | |
""" | |
if session.permanent: | |
return datetime.utcnow() + app.permanent_session_lifetime | |
def should_set_cookie(self, app, session): | |
"""Used by session backends to determine if a ``Set-Cookie`` header | |
should be set for this session cookie for this response. If the session | |
has been modified, the cookie is set. If the session is permanent and | |
the ``SESSION_REFRESH_EACH_REQUEST`` config is true, the cookie is | |
always set. | |
This check is usually skipped if the session was deleted. | |
.. versionadded:: 0.11 | |
""" | |
return session.modified or ( | |
session.permanent and app.config['SESSION_REFRESH_EACH_REQUEST'] | |
) | |
def open_session(self, app, request): | |
"""This method has to be implemented and must either return ``None`` | |
in case the loading failed because of a configuration error or an | |
instance of a session object which implements a dictionary like | |
interface + the methods and attributes on :class:`SessionMixin`. | |
""" | |
raise NotImplementedError() | |
def save_session(self, app, session, response): | |
"""This is called for actual sessions returned by :meth:`open_session` | |
at the end of the request. This is still called during a request | |
context so if you absolutely need access to the request you can do | |
that. | |
""" | |
raise NotImplementedError() | |
#: Module-level serializer instance shared by the default session
#: interface below; a tagged JSON serializer that round-trips extra
#: Python types (e.g. datetimes, tuples) on top of compact JSON.
session_json_serializer = TaggedJSONSerializer()
class SecureCookieSessionInterface(SessionInterface):
    """The default session interface that stores sessions in signed cookies
    through the :mod:`itsdangerous` module.
    """

    #: Salt combined with the secret key when signing cookie sessions.
    salt = 'cookie-session'
    #: Hash function used for the signature (sha1 by default).
    digest_method = staticmethod(hashlib.sha1)
    #: Name of the itsdangerous-supported key derivation (hmac by default).
    key_derivation = 'hmac'
    #: Serializer for the cookie payload.  The default is a compact JSON
    #: derived serializer with support for some extra Python types such
    #: as datetime objects or tuples.
    serializer = session_json_serializer
    #: Session class instantiated for every loaded or fresh session.
    session_class = SecureCookieSession

    def get_signing_serializer(self, app):
        """Return a timed signing serializer for ``app``, or ``None``
        when no secret key is configured.
        """
        if not app.secret_key:
            return None

        return URLSafeTimedSerializer(
            app.secret_key,
            salt=self.salt,
            serializer=self.serializer,
            signer_kwargs={
                'key_derivation': self.key_derivation,
                'digest_method': self.digest_method,
            },
        )

    def open_session(self, app, request):
        """Load the session from the request cookie.  Returns ``None``
        without a secret key, and an empty session when the cookie is
        missing, expired, or fails signature verification.
        """
        serializer = self.get_signing_serializer(app)

        if serializer is None:
            # No secret key configured; Flask falls back to a null session.
            return None

        cookie_val = request.cookies.get(app.session_cookie_name)

        if not cookie_val:
            return self.session_class()

        max_age = total_seconds(app.permanent_session_lifetime)

        try:
            payload = serializer.loads(cookie_val, max_age=max_age)
        except BadSignature:
            # Tampered or expired cookie: start over with an empty session.
            return self.session_class()

        return self.session_class(payload)

    def save_session(self, app, session, response):
        """Serialize the session into a signed cookie on ``response``,
        deleting the cookie when the session was emptied.
        """
        domain = self.get_cookie_domain(app)
        path = self.get_cookie_path(app)

        # A session emptied by the request deletes its cookie; a session
        # that was never populated is skipped entirely.
        if not session:
            if session.modified:
                response.delete_cookie(
                    app.session_cookie_name,
                    domain=domain,
                    path=path
                )
            return

        # The response depends on the cookie as soon as the session was
        # read at all, so make caches aware via "Vary: Cookie".
        if session.accessed:
            response.vary.add('Cookie')

        if not self.should_set_cookie(app, session):
            return

        response.set_cookie(
            app.session_cookie_name,
            self.get_signing_serializer(app).dumps(dict(session)),
            expires=self.get_expiration_time(app, session),
            httponly=self.get_cookie_httponly(app),
            domain=domain,
            path=path,
            secure=self.get_cookie_secure(app),
            samesite=self.get_cookie_samesite(app)
        )
# -*- coding: utf-8 -*- | |
""" | |
flask.signals | |
~~~~~~~~~~~~~ | |
Implements signals based on blinker if available, otherwise | |
falls silently back to a noop. | |
:copyright: © 2010 by the Pallets team. | |
:license: BSD, see LICENSE for more details. | |
""" | |
signals_available = False

try:
    from blinker import Namespace
    signals_available = True
except ImportError:
    class Namespace(object):
        """Stand-in for ``blinker.Namespace`` used when blinker is not
        installed; hands out fake signals instead of real ones.
        """

        def signal(self, name, doc=None):
            return _FakeSignal(name, doc)

    class _FakeSignal(object):
        """If blinker is unavailable, create a fake class with the same
        interface that allows sending of signals but will fail with an
        error on anything else.  Instead of doing anything on send, it
        will just ignore the arguments and do nothing instead.
        """

        def __init__(self, name, doc=None):
            self.name = name
            self.__doc__ = doc

        def send(self, *args, **kwargs):
            # Sending is a silent no-op without blinker.
            pass

        def _fail(self, *args, **kwargs):
            raise RuntimeError('signalling support is unavailable '
                               'because the blinker library is '
                               'not installed.')

        # Every operation other than ``send`` raises loudly.
        connect = _fail
        disconnect = _fail
        has_receivers_for = _fail
        receivers_for = _fail
        temporarily_connected_to = _fail
        connected_to = _fail
        del _fail
# The namespace for code signals.  If you are not Flask code, do
# not put signals in here.  Create your own namespace instead.
_signals = Namespace()

# Core signals.  For usage examples grep the source code or consult
# the API documentation in docs/api.rst as well as docs/signals.rst
# Template lifecycle signals.
template_rendered = _signals.signal('template-rendered')
before_render_template = _signals.signal('before-render-template')
# Request lifecycle signals.
request_started = _signals.signal('request-started')
request_finished = _signals.signal('request-finished')
request_tearing_down = _signals.signal('request-tearing-down')
got_request_exception = _signals.signal('got-request-exception')
# Application context lifecycle signals.
appcontext_tearing_down = _signals.signal('appcontext-tearing-down')
appcontext_pushed = _signals.signal('appcontext-pushed')
appcontext_popped = _signals.signal('appcontext-popped')
# Fired by flask.flash().
message_flashed = _signals.signal('message-flashed')
# -*- coding: utf-8 -*- | |
""" | |
flask.templating | |
~~~~~~~~~~~~~~~~ | |
Implements the bridge to Jinja2. | |
:copyright: © 2010 by the Pallets team. | |
:license: BSD, see LICENSE for more details. | |
""" | |
from jinja2 import BaseLoader, Environment as BaseEnvironment, \ | |
TemplateNotFound | |
from .globals import _request_ctx_stack, _app_ctx_stack | |
from .signals import template_rendered, before_render_template | |
def _default_template_ctx_processor():
    """Default template context processor.  Injects `request`,
    `session` and `g` when the corresponding contexts are active.
    """
    ctx = {}
    appctx = _app_ctx_stack.top
    reqctx = _request_ctx_stack.top

    # Each stack may be empty (e.g. rendering outside a request), so
    # only inject what is actually available.
    if appctx is not None:
        ctx['g'] = appctx.g
    if reqctx is not None:
        ctx['request'] = reqctx.request
        ctx['session'] = reqctx.session

    return ctx
class Environment(BaseEnvironment):
    """Works like a regular Jinja2 environment but has some additional
    knowledge of how Flask's blueprint works so that it can prepend the
    name of the blueprint to referenced templates if necessary.
    """

    def __init__(self, app, **options):
        # Fall back to the app's global loader unless the caller passed one.
        if 'loader' not in options:
            options['loader'] = app.create_global_jinja_loader()

        super(Environment, self).__init__(**options)
        self.app = app
class DispatchingJinjaLoader(BaseLoader):
    """A loader that looks for templates in the application and all
    the blueprint folders.
    """

    def __init__(self, app):
        self.app = app

    def get_source(self, environment, template):
        """Dispatch to the explaining or the fast lookup depending on the
        ``EXPLAIN_TEMPLATE_LOADING`` config flag.
        """
        if self.app.config['EXPLAIN_TEMPLATE_LOADING']:
            return self._get_source_explained(environment, template)
        return self._get_source_fast(environment, template)

    def _get_source_explained(self, environment, template):
        """Try every loader, recording each attempt so the debug helper
        can explain which loader won (or why all failed).
        """
        attempts = []
        first_hit = None

        for srcobj, loader in self._iter_loaders(template):
            try:
                result = loader.get_source(environment, template)
            except TemplateNotFound:
                result = None
            else:
                # The first successful loader wins, but keep probing the
                # rest so the explanation lists every attempt.
                if first_hit is None:
                    first_hit = result
            attempts.append((loader, srcobj, result))

        from .debughelpers import explain_template_loading_attempts
        explain_template_loading_attempts(self.app, template, attempts)

        if first_hit is None:
            raise TemplateNotFound(template)
        return first_hit

    def _get_source_fast(self, environment, template):
        """Return the first source any loader can provide."""
        for _srcobj, loader in self._iter_loaders(template):
            try:
                return loader.get_source(environment, template)
            except TemplateNotFound:
                continue
        raise TemplateNotFound(template)

    def _iter_loaders(self, template):
        """Yield ``(source_object, loader)`` pairs for the application and
        every registered blueprint that has a template folder.
        """
        app_loader = self.app.jinja_loader
        if app_loader is not None:
            yield self.app, app_loader

        for blueprint in self.app.iter_blueprints():
            bp_loader = blueprint.jinja_loader
            if bp_loader is not None:
                yield blueprint, bp_loader

    def list_templates(self):
        """Return the union of template names visible through every loader,
        without duplicates.
        """
        names = set()

        app_loader = self.app.jinja_loader
        if app_loader is not None:
            names.update(app_loader.list_templates())

        for blueprint in self.app.iter_blueprints():
            bp_loader = blueprint.jinja_loader
            if bp_loader is not None:
                names.update(bp_loader.list_templates())

        return list(names)
def _render(template, context, app):
    """Render ``template`` with ``context``, firing the before/after
    template signals around the render call.
    """
    before_render_template.send(app, template=template, context=context)
    rendered = template.render(context)
    template_rendered.send(app, template=template, context=context)
    return rendered
def render_template(template_name_or_list, **context):
    """Renders a template from the template folder with the given
    context.

    :param template_name_or_list: the name of the template to be
                                  rendered, or an iterable with template names
                                  the first one existing will be rendered
    :param context: the variables that should be available in the
                    context of the template.
    """
    ctx = _app_ctx_stack.top
    # Let the app's context processors add their variables first.
    ctx.app.update_template_context(context)
    template = ctx.app.jinja_env.get_or_select_template(
        template_name_or_list)
    return _render(template, context, ctx.app)
def render_template_string(source, **context):
    """Renders a template from the given template source string
    with the given context.  Template variables will be autoescaped.

    :param source: the source code of the template to be
                   rendered
    :param context: the variables that should be available in the
                    context of the template.
    """
    ctx = _app_ctx_stack.top
    # Let the app's context processors add their variables first.
    ctx.app.update_template_context(context)
    template = ctx.app.jinja_env.from_string(source)
    return _render(template, context, ctx.app)
# -*- coding: utf-8 -*- | |
""" | |
flask.testing | |
~~~~~~~~~~~~~ | |
Implements test support helpers. This module is lazily imported | |
and usually not used in production environments. | |
:copyright: © 2010 by the Pallets team. | |
:license: BSD, see LICENSE for more details. | |
""" | |
import werkzeug | |
from contextlib import contextmanager | |
from click.testing import CliRunner | |
from flask.cli import ScriptInfo | |
from werkzeug.test import Client, EnvironBuilder | |
from flask import _request_ctx_stack | |
from flask.json import dumps as json_dumps | |
from werkzeug.urls import url_parse | |
def make_test_environ_builder(
    app, path='/', base_url=None, subdomain=None, url_scheme=None,
    *args, **kwargs
):
    """Create a :class:`~werkzeug.test.EnvironBuilder`, taking some
    defaults from the application.

    :param app: The Flask application to configure the environment from.
    :param path: URL path being requested.
    :param base_url: Base URL where the app is being served, which
        ``path`` is relative to. If not given, built from
        :data:`PREFERRED_URL_SCHEME`, ``subdomain``,
        :data:`SERVER_NAME`, and :data:`APPLICATION_ROOT`.
    :param subdomain: Subdomain name to append to :data:`SERVER_NAME`.
    :param url_scheme: Scheme to use instead of
        :data:`PREFERRED_URL_SCHEME`.
    :param json: If given, this is serialized as JSON and passed as
        ``data``. Also defaults ``content_type`` to
        ``application/json``.
    :param args: other positional arguments passed to
        :class:`~werkzeug.test.EnvironBuilder`.
    :param kwargs: other keyword arguments passed to
        :class:`~werkzeug.test.EnvironBuilder`.
    """
    # ``base_url`` already encodes host, scheme and root, so combining it
    # with ``subdomain``/``url_scheme`` would be ambiguous -- reject that.
    assert (
        not (base_url or subdomain or url_scheme)
        or (base_url is not None) != bool(subdomain or url_scheme)
    ), 'Cannot pass "subdomain" or "url_scheme" with "base_url".'

    if base_url is None:
        # Derive the base URL from the app's config, falling back to
        # localhost when no SERVER_NAME is configured.
        http_host = app.config.get('SERVER_NAME') or 'localhost'
        app_root = app.config['APPLICATION_ROOT']

        if subdomain:
            http_host = '{0}.{1}'.format(subdomain, http_host)

        if url_scheme is None:
            url_scheme = app.config['PREFERRED_URL_SCHEME']

        # ``path`` may itself be a full URL; pieces present in it take
        # precedence over the config-derived defaults.
        url = url_parse(path)
        base_url = '{scheme}://{netloc}/{path}'.format(
            scheme=url.scheme or url_scheme,
            netloc=url.netloc or http_host,
            path=app_root.lstrip('/')
        )
        path = url.path

        if url.query:
            # url_parse may yield bytes or text; keep the separator's
            # type consistent with the query so the concat works.
            sep = b'?' if isinstance(url.query, bytes) else '?'
            path += sep + url.query

    if 'json' in kwargs:
        assert 'data' not in kwargs, (
            "Client cannot provide both 'json' and 'data'."
        )

        # push a context so flask.json can use app's json attributes
        with app.app_context():
            kwargs['data'] = json_dumps(kwargs.pop('json'))

        if 'content_type' not in kwargs:
            kwargs['content_type'] = 'application/json'

    return EnvironBuilder(path, base_url, *args, **kwargs)
class FlaskClient(Client):
    """Works like a regular Werkzeug test client but has some knowledge about
    how Flask works to defer the cleanup of the request context stack to the
    end of a ``with`` body when used in a ``with`` statement.  For general
    information about how to use this class refer to
    :class:`werkzeug.test.Client`.

    .. versionchanged:: 0.12
       `app.test_client()` includes preset default environment, which can be
       set after instantiation of the `app.test_client()` object in
       `client.environ_base`.

    Basic usage is outlined in the :ref:`testing` chapter.
    """

    # Whether the request context of the last request should be kept
    # alive until the enclosing ``with`` block exits; toggled by
    # __enter__/__exit__ below.
    preserve_context = False

    def __init__(self, *args, **kwargs):
        super(FlaskClient, self).__init__(*args, **kwargs)
        # Defaults merged into every request's WSGI environ; callers may
        # mutate this dict after the client is created.
        self.environ_base = {
            "REMOTE_ADDR": "127.0.0.1",
            "HTTP_USER_AGENT": "werkzeug/" + werkzeug.__version__
        }

    @contextmanager
    def session_transaction(self, *args, **kwargs):
        """When used in combination with a ``with`` statement this opens a
        session transaction.  This can be used to modify the session that
        the test client uses.  Once the ``with`` block is left the session is
        stored back.

        ::

            with client.session_transaction() as session:
                session['value'] = 42

        Internally this is implemented by going through a temporary test
        request context and since session handling could depend on
        request variables this function accepts the same arguments as
        :meth:`~flask.Flask.test_request_context` which are directly
        passed through.

        :raises RuntimeError: if cookies are disabled on the client or the
            session backend fails to open a session.
        """
        if self.cookie_jar is None:
            raise RuntimeError('Session transactions only make sense '
                               'with cookies enabled.')
        app = self.application
        # Feed the client's stored cookies into the temporary request so
        # the session backend sees the current session cookie.
        environ_overrides = kwargs.setdefault('environ_overrides', {})
        self.cookie_jar.inject_wsgi(environ_overrides)
        outer_reqctx = _request_ctx_stack.top
        with app.test_request_context(*args, **kwargs) as c:
            session_interface = app.session_interface
            sess = session_interface.open_session(app, c.request)
            if sess is None:
                raise RuntimeError('Session backend did not open a session. '
                                   'Check the configuration')

            # Since we have to open a new request context for the session
            # handling we want to make sure that we hide out own context
            # from the caller.  By pushing the original request context
            # (or None) on top of this and popping it we get exactly that
            # behavior.  It's important to not use the push and pop
            # methods of the actual request context object since that would
            # mean that cleanup handlers are called
            _request_ctx_stack.push(outer_reqctx)
            try:
                yield sess
            finally:
                _request_ctx_stack.pop()

            # Persist any changes the caller made by running the save
            # machinery against a throwaway response, then harvest the
            # resulting Set-Cookie headers back into the client's jar.
            resp = app.response_class()
            if not session_interface.is_null_session(sess):
                session_interface.save_session(app, sess, resp)
            headers = resp.get_wsgi_headers(c.request.environ)
            self.cookie_jar.extract_wsgi(c.request.environ, headers)

    def open(self, *args, **kwargs):
        # Pop the Werkzeug Client options so they are not forwarded to
        # the environ builder below.
        as_tuple = kwargs.pop('as_tuple', False)
        buffered = kwargs.pop('buffered', False)
        follow_redirects = kwargs.pop('follow_redirects', False)

        if (
            not kwargs and len(args) == 1
            and isinstance(args[0], (EnvironBuilder, dict))
        ):
            # The caller supplied a ready-made builder or environ dict;
            # layer it over the client's environ defaults.
            environ = self.environ_base.copy()

            if isinstance(args[0], EnvironBuilder):
                environ.update(args[0].get_environ())
            else:
                environ.update(args[0])

            environ['flask._preserve_context'] = self.preserve_context
        else:
            kwargs.setdefault('environ_overrides', {}) \
                ['flask._preserve_context'] = self.preserve_context
            kwargs.setdefault('environ_base', self.environ_base)
            builder = make_test_environ_builder(
                self.application, *args, **kwargs
            )

            try:
                environ = builder.get_environ()
            finally:
                # Always release files the builder may have opened.
                builder.close()

        return Client.open(
            self, environ,
            as_tuple=as_tuple,
            buffered=buffered,
            follow_redirects=follow_redirects
        )

    def __enter__(self):
        if self.preserve_context:
            raise RuntimeError('Cannot nest client invocations')
        self.preserve_context = True
        return self

    def __exit__(self, exc_type, exc_value, tb):
        self.preserve_context = False

        # on exit we want to clean up earlier.  Normally the request context
        # stays preserved until the next request in the same thread comes
        # in.  See RequestGlobals.push() for the general behavior.
        top = _request_ctx_stack.top
        if top is not None and top.preserved:
            top.pop()
class FlaskCliRunner(CliRunner):
    """A :class:`~click.testing.CliRunner` for testing a Flask app's
    CLI commands.  Typically created using
    :meth:`~flask.Flask.test_cli_runner`.  See :ref:`testing-cli`.
    """

    def __init__(self, app, **kwargs):
        self.app = app
        super(FlaskCliRunner, self).__init__(**kwargs)

    def invoke(self, cli=None, args=None, **kwargs):
        """Invokes a CLI command in an isolated environment.  See
        :meth:`CliRunner.invoke <click.testing.CliRunner.invoke>` for
        full method documentation.  See :ref:`testing-cli` for examples.

        If the ``obj`` argument is not given, passes an instance of
        :class:`~flask.cli.ScriptInfo` that knows how to load the Flask
        app being tested.

        :param cli: Command object to invoke.  Default is the app's
            :attr:`~flask.app.Flask.cli` group.
        :param args: List of strings to invoke the command with.

        :return: a :class:`~click.testing.Result` object.
        """
        if cli is None:
            cli = self.app.cli

        # Supply a loader for the app under test unless the caller
        # already provided a context object.
        if 'obj' not in kwargs:
            kwargs['obj'] = ScriptInfo(create_app=lambda: self.app)

        return super(FlaskCliRunner, self).invoke(cli, args, **kwargs)
# -*- coding: utf-8 -*- | |
""" | |
flask.views | |
~~~~~~~~~~~ | |
This module provides class-based views inspired by the ones in Django. | |
:copyright: © 2010 by the Pallets team. | |
:license: BSD, see LICENSE for more details. | |
""" | |
from .globals import request | |
from ._compat import with_metaclass | |
#: Lowercase names of the HTTP-method handler functions a
#: :class:`MethodView` subclass may implement.
http_method_funcs = frozenset([
    'get', 'post', 'head', 'options',
    'delete', 'put', 'trace', 'patch',
])
class View(object):
    """Alternative way to use view functions.  A subclass has to implement
    :meth:`dispatch_request` which is called with the view arguments from
    the URL routing system.  If :attr:`methods` is provided the methods
    do not have to be passed to the :meth:`~flask.Flask.add_url_rule`
    method explicitly::

        class MyView(View):
            methods = ['GET']

            def dispatch_request(self, name):
                return 'Hello %s!' % name

        app.add_url_rule('/hello/<name>', view_func=MyView.as_view('myview'))

    When you want to decorate a pluggable view you will have to either do
    that when the view function is created (by wrapping the return value of
    :meth:`as_view`) or you can use the :attr:`decorators` attribute::

        class SecretView(View):
            methods = ['GET']
            decorators = [superuser_required]

            def dispatch_request(self):
                ...

    The decorators stored in the decorators list are applied one after
    another when the view function is created.  Note that you can *not*
    use the class based decorators since those would decorate the view
    class and not the generated view function!
    """

    #: A list of methods this view can handle.
    methods = None

    #: Setting this disables or force-enables the automatic options handling.
    provide_automatic_options = None

    #: Decorators applied, in order, to the function produced by
    #: :meth:`as_view`.
    #:
    #: .. versionadded:: 0.8
    decorators = ()

    def dispatch_request(self):
        """Subclasses have to override this method to implement the
        actual view function code.  This method is called with all
        the arguments from the URL rule.
        """
        raise NotImplementedError()

    @classmethod
    def as_view(cls, name, *class_args, **class_kwargs):
        """Converts the class into an actual view function that can be used
        with the routing system.  Internally this generates a function on
        the fly which will instantiate the :class:`View` on each request
        and call the :meth:`dispatch_request` method on it.

        The arguments passed to :meth:`as_view` are forwarded to the
        constructor of the class.
        """
        def view(*args, **kwargs):
            # A fresh instance per request.  ``view_class`` is looked up
            # on the (possibly decorated) function object so it can be
            # swapped out, e.g. for testing.
            instance = view.view_class(*class_args, **class_kwargs)
            return instance.dispatch_request(*args, **kwargs)

        if cls.decorators:
            # Give decorators a sensibly named function to wrap before
            # applying them one after another.
            view.__name__ = name
            view.__module__ = cls.__module__
            for decorator in cls.decorators:
                view = decorator(view)

        # Attach the class (used both for introspection and for
        # instantiation above) plus routing/introspection metadata to the
        # final, possibly decorated, function.
        view.view_class = cls
        view.__name__ = name
        view.__doc__ = cls.__doc__
        view.__module__ = cls.__module__
        view.methods = cls.methods
        view.provide_automatic_options = cls.provide_automatic_options
        return view
class MethodViewType(type):
    """Metaclass for :class:`MethodView` that determines what methods the
    view defines.
    """

    def __init__(cls, name, bases, d):
        super(MethodViewType, cls).__init__(name, bases, d)

        # Only derive ``methods`` when the class body did not set it
        # explicitly.
        if 'methods' not in d:
            derived = {key.upper() for key in http_method_funcs
                       if hasattr(cls, key)}

            # If we have no method at all in there we don't want to add a
            # method list.  This is for instance the case for the base
            # class or another subclass of a base method view that does
            # not introduce new methods.
            if derived:
                cls.methods = derived
class MethodView(with_metaclass(MethodViewType, View)):
    """A class-based view that dispatches request methods to the
    corresponding class methods.  For example, if you implement a ``get``
    method, it will be used to handle ``GET`` requests. ::

        class CounterAPI(MethodView):
            def get(self):
                return session.get('counter', 0)

            def post(self):
                session['counter'] = session.get('counter', 0) + 1
                return 'OK'

        app.add_url_rule('/counter', view_func=CounterAPI.as_view('counter'))
    """

    def dispatch_request(self, *args, **kwargs):
        handler = getattr(self, request.method.lower(), None)

        # If the request method is HEAD and we don't have a handler for
        # it, retry with GET.
        if handler is None and request.method == 'HEAD':
            handler = getattr(self, 'get', None)

        assert handler is not None, \
            'Unimplemented method %r' % request.method
        return handler(*args, **kwargs)
# -*- coding: utf-8 -*- | |
""" | |
flask.wrappers | |
~~~~~~~~~~~~~~ | |
Implements the WSGI wrappers (request and response). | |
:copyright: © 2010 by the Pallets team. | |
:license: BSD, see LICENSE for more details. | |
""" | |
from werkzeug.exceptions import BadRequest | |
from werkzeug.wrappers import Request as RequestBase, Response as ResponseBase | |
from flask import json | |
from flask.globals import current_app | |
class JSONMixin(object):
    """Common mixin for both request and response objects to provide JSON
    parsing capabilities.

    .. versionadded:: 1.0
    """

    # Two-slot cache: index 0 holds the non-silent result, index 1 the
    # silent result; ``Ellipsis`` marks an empty slot.
    _cached_json = (Ellipsis, Ellipsis)

    @property
    def is_json(self):
        """Check if the mimetype indicates JSON data, either
        :mimetype:`application/json` or :mimetype:`application/*+json`.

        .. versionadded:: 0.11
        """
        mt = self.mimetype
        if mt == 'application/json':
            return True
        return mt.startswith('application/') and mt.endswith('+json')

    @property
    def json(self):
        """This will contain the parsed JSON data if the mimetype indicates
        JSON (:mimetype:`application/json`, see :meth:`is_json`), otherwise
        it will be ``None``.
        """
        return self.get_json()

    def _get_data_for_json(self, cache):
        """Hook for subclasses to customize how the raw body is obtained."""
        return self.get_data(cache=cache)

    def get_json(self, force=False, silent=False, cache=True):
        """Parse and return the data as JSON.  If the mimetype does not
        indicate JSON (:mimetype:`application/json`, see
        :meth:`is_json`), this returns ``None`` unless ``force`` is
        true.  If parsing fails, :meth:`on_json_loading_failed` is called
        and its return value is used as the return value.

        :param force: Ignore the mimetype and always try to parse JSON.
        :param silent: Silence parsing errors and return ``None``
            instead.
        :param cache: Store the parsed JSON to return for subsequent
            calls.
        """
        # ``silent`` doubles as the cache index (False -> 0, True -> 1).
        if cache and self._cached_json[silent] is not Ellipsis:
            return self._cached_json[silent]

        if not (force or self.is_json):
            return None

        data = self._get_data_for_json(cache=cache)

        try:
            rv = json.loads(data)
        except ValueError as e:
            if silent:
                rv = None
                slot = 1  # only the silent slot may cache this None
            else:
                rv = self.on_json_loading_failed(e)
                slot = 0  # only the non-silent slot caches this value
            if cache:
                updated = list(self._cached_json)
                updated[slot] = rv
                self._cached_json = tuple(updated)
        else:
            # A successful parse is valid for both modes.
            if cache:
                self._cached_json = (rv, rv)

        return rv

    def on_json_loading_failed(self, e):
        """Called if :meth:`get_json` parsing fails and isn't silenced.  If
        this method returns a value, it is used as the return value for
        :meth:`get_json`.  The default implementation raises a
        :class:`BadRequest` exception.

        .. versionchanged:: 0.10
           Raise a :exc:`BadRequest` error instead of returning an error
           message as JSON.  If you want that behavior you can add it by
           subclassing.

        .. versionadded:: 0.8
        """
        # Expose the parse error only in debug mode.
        if current_app is not None and current_app.debug:
            raise BadRequest('Failed to decode JSON object: {0}'.format(e))

        raise BadRequest()
class Request(RequestBase, JSONMixin):
    """The request object used by default in Flask. Remembers the
    matched endpoint and view arguments.

    It is what ends up as :class:`~flask.request`. If you want to
    replace the request object used you can subclass this and set
    :attr:`~flask.Flask.request_class` to your subclass.

    The request object is a :class:`~werkzeug.wrappers.Request`
    subclass and provides all of the attributes Werkzeug defines plus
    a few Flask specific ones.
    """
    #: The internal URL rule that matched the request. This can be
    #: useful to inspect which methods are allowed for the URL from
    #: a before/after handler (``request.url_rule.methods``) etc.
    #: Though if the request's method was invalid for the URL rule,
    #: the valid list is available in
    #: ``routing_exception.valid_methods`` instead (an attribute of the
    #: Werkzeug exception :exc:`~werkzeug.exceptions.MethodNotAllowed`)
    #: because the request was never internally bound.
    #:
    #: .. versionadded:: 0.6
    url_rule = None
    #: A dict of view arguments that matched the request. If an
    #: exception happened when matching, this will be ``None``.
    view_args = None
    #: If matching the URL failed, this is the exception that will be
    #: raised / was raised as part of the request handling. This is
    #: usually a :exc:`~werkzeug.exceptions.NotFound` exception or
    #: something similar.
    routing_exception = None

    @property
    def max_content_length(self):
        """Read-only view of the ``MAX_CONTENT_LENGTH`` config key."""
        if not current_app:
            return None
        return current_app.config['MAX_CONTENT_LENGTH']

    @property
    def endpoint(self):
        """The endpoint that matched the request. This in combination
        with :attr:`view_args` can be used to reconstruct the same or a
        modified URL. If an exception happened when matching, this will
        be ``None``.
        """
        rule = self.url_rule
        return rule.endpoint if rule is not None else None

    @property
    def blueprint(self):
        """The name of the current blueprint (``None`` when the
        endpoint is not namespaced with a blueprint prefix)."""
        rule = self.url_rule
        if rule and '.' in rule.endpoint:
            return rule.endpoint.rsplit('.', 1)[0]
        return None

    def _load_form_data(self):
        RequestBase._load_form_data(self)
        # In debug mode we're replacing the files multidict with an
        # ad-hoc subclass that raises a different error for key errors,
        # to help diagnose forms submitted without multipart enctype.
        debugging = current_app and current_app.debug
        if debugging and self.mimetype != 'multipart/form-data' \
                and not self.files:
            from .debughelpers import attach_enctype_error_multidict
            attach_enctype_error_multidict(self)
class Response(ResponseBase, JSONMixin):
    """The response object that is used by default in Flask. Works like
    the response object from Werkzeug but is set to have an HTML
    mimetype by default. Quite often you don't have to create this
    object yourself because :meth:`~flask.Flask.make_response` will
    take care of that for you.

    If you want to replace the response object used you can subclass
    this and set :attr:`~flask.Flask.response_class` to your subclass.
    .. versionchanged:: 1.0
        JSON support is added to the response, like the request. This
        is useful when testing to get the test client response data as
        JSON.
    .. versionchanged:: 1.0
        Added :attr:`max_cookie_size`.
    """
    default_mimetype = 'text/html'

    def _get_data_for_json(self, cache):
        # Responses ignore the ``cache`` flag and always read the
        # body via get_data().
        return self.get_data()

    @property
    def max_cookie_size(self):
        """Read-only view of the :data:`MAX_COOKIE_SIZE` config key.

        See :attr:`~werkzeug.wrappers.BaseResponse.max_cookie_size` in
        Werkzeug's docs.
        """
        if not current_app:
            # return Werkzeug's default when not in an app context
            return super(Response, self).max_cookie_size
        return current_app.config['MAX_COOKIE_SIZE']
..\__pycache__\itsdangerous.cpython-37.pyc | |
..\itsdangerous.py | |
PKG-INFO | |
SOURCES.txt | |
dependency_links.txt | |
not-zip-safe | |
top_level.txt |
Metadata-Version: 1.1 | |
Name: itsdangerous | |
Version: 0.24 | |
Summary: Various helpers to pass trusted data to untrusted environments and back. | |
Home-page: http://github.com/mitsuhiko/itsdangerous | |
Author: Armin Ronacher | |
Author-email: armin.ronacher@active-4.com | |
License: UNKNOWN | |
Description: UNKNOWN | |
Platform: UNKNOWN | |
Classifier: License :: OSI Approved :: BSD License | |
Classifier: Programming Language :: Python | |
Classifier: Programming Language :: Python :: 3 |
CHANGES | |
LICENSE | |
MANIFEST.in | |
Makefile | |
README | |
itsdangerous.py | |
setup.cfg | |
setup.py | |
tests.py | |
tox.ini | |
docs/Makefile | |
docs/conf.py | |
docs/index.rst | |
docs/make.bat | |
docs/_static/itsdangerous.png | |
docs/_themes/.gitignore | |
docs/_themes/LICENSE | |
docs/_themes/README | |
docs/_themes/flask_theme_support.py | |
docs/_themes/flask_small/layout.html | |
docs/_themes/flask_small/theme.conf | |
docs/_themes/flask_small/static/flasky.css_t | |
itsdangerous.egg-info/PKG-INFO | |
itsdangerous.egg-info/SOURCES.txt | |
itsdangerous.egg-info/dependency_links.txt | |
itsdangerous.egg-info/not-zip-safe | |
itsdangerous.egg-info/top_level.txt |
itsdangerous |
# -*- coding: utf-8 -*- | |
""" | |
itsdangerous | |
~~~~~~~~~~~~ | |
A module that implements various functions to deal with untrusted | |
sources. Mainly useful for web applications. | |
:copyright: (c) 2014 by Armin Ronacher and the Django Software Foundation. | |
:license: BSD, see LICENSE for more details. | |
""" | |
import sys | |
import hmac | |
import zlib | |
import time | |
import base64 | |
import hashlib | |
import operator | |
from datetime import datetime | |
# Python 2/3 compatibility shims: expose a common set of names
# (izip, text_type, int_to_byte, number_types) regardless of version.
PY2 = sys.version_info[0] == 2
if PY2:
    from itertools import izip
    text_type = unicode
    int_to_byte = chr
    number_types = (int, long, float)
else:
    # reduce moved out of builtins in Python 3.
    from functools import reduce
    izip = zip
    text_type = str
    # Converts an int in range 0..255 to a single big-endian byte.
    int_to_byte = operator.methodcaller('to_bytes', 1, 'big')
    number_types = (int, float)
try: | |
import simplejson as json | |
except ImportError: | |
import json | |
class _CompactJSON(object): | |
"""Wrapper around simplejson that strips whitespace. | |
""" | |
def loads(self, payload): | |
return json.loads(payload) | |
def dumps(self, obj): | |
return json.dumps(obj, separators=(',', ':')) | |
compact_json = _CompactJSON() | |
# Signing epoch used by TimestampSigner: 2011/01/01 in UTC, expressed
# as seconds since the Unix epoch (keeps serialized timestamps small).
EPOCH = 1293840000
def want_bytes(s, encoding='utf-8', errors='strict'):
    """Coerce ``s`` to bytes: text is encoded, anything else passes
    through unchanged."""
    if not isinstance(s, text_type):
        return s
    return s.encode(encoding, errors)
def is_text_serializer(serializer):
    """Checks whether a serializer generates text or binary output by
    probing it with an empty dict."""
    probe = serializer.dumps({})
    return isinstance(probe, text_type)
# Starting with 3.3 the standard library has a c-implementation for
# constant time string compares; prefer it when available. ``None``
# selects the pure-Python fallback in constant_time_compare below.
_builtin_constant_time_compare = getattr(hmac, 'compare_digest', None)
def constant_time_compare(val1, val2):
    """Returns True if the two strings are equal, False otherwise.
    The time taken is independent of the number of characters that match. Do
    not use this function for anything else than comparison with known
    length targets.
    This should be implemented in C in order to get it completely right.
    """
    if _builtin_constant_time_compare is not None:
        return _builtin_constant_time_compare(val1, val2)
    len_eq = len(val1) == len(val2)
    if len_eq:
        result = 0
        left = val1
    else:
        # Lengths differ: compare val2 against itself so the loop still
        # does the same amount of work, and pre-set a nonzero result so
        # the comparison can never succeed.
        result = 1
        left = val2
    # Accumulate XOR differences; any mismatching byte makes result
    # nonzero without short-circuiting.
    for x, y in izip(bytearray(left), bytearray(val2)):
        result |= x ^ y
    return result == 0
class BadData(Exception):
    """Raised if bad data of any sort was encountered. This is the
    base for all exceptions that itsdangerous is currently using.
    .. versionadded:: 0.15
    """
    # Human-readable description of the failure; set by __init__.
    message = None
    def __init__(self, message):
        Exception.__init__(self, message)
        self.message = message
    def __str__(self):
        return text_type(self.message)
    if PY2:
        # On Python 2, __str__ must return encoded bytes; keep the
        # text version available as __unicode__.
        __unicode__ = __str__
        def __str__(self):
            return self.__unicode__().encode('utf-8')
class BadPayload(BadData):
    """Raised when a payload is loaded without checking the signature
    first and an exception happened as a result of that. The original
    exception that caused it is stored on the exception as
    :attr:`original_error`.

    This can also happen with a :class:`JSONWebSignatureSerializer`
    that is subclassed and uses a different serializer for the payload
    than the expected one.
    .. versionadded:: 0.15
    """

    def __init__(self, message, original_error=None):
        #: If available, the error that indicates why the payload
        #: was not valid. This might be `None`.
        self.original_error = original_error
        BadData.__init__(self, message)
class BadSignature(BadData):
    """Raised if a signature does not match. As of itsdangerous 0.14
    helpful attributes are available on the exception instances. You
    can also catch the :exc:`BadData` baseclass.
    """

    def __init__(self, message, payload=None):
        #: The payload that failed the signature test. In some
        #: situations you might still want to inspect this, even if
        #: you know it was tampered with.
        #:
        #: .. versionadded:: 0.14
        self.payload = payload
        BadData.__init__(self, message)
class BadTimeSignature(BadSignature):
    """Raised for time based signatures that fail. This is a subclass
    of :class:`BadSignature`, so the base class can be caught instead.
    """

    def __init__(self, message, payload=None, date_signed=None):
        #: If the signature expired this exposes the date of when the
        #: signature was created. This can be helpful in order to
        #: tell the user how long a link has been gone stale.
        #:
        #: .. versionadded:: 0.14
        self.date_signed = date_signed
        BadSignature.__init__(self, message, payload)
class BadHeader(BadSignature):
    """Raised if a signed header is invalid in some form. This only
    happens for serializers that have a header that goes with the
    signature.
    .. versionadded:: 0.24
    """

    def __init__(self, message, payload=None, header=None,
                 original_error=None):
        #: If the header is actually available but just malformed it
        #: might be stored here.
        self.header = header
        #: If available, the error that indicates why the payload
        #: was not valid. This might be `None`.
        self.original_error = original_error
        BadSignature.__init__(self, message, payload)
class SignatureExpired(BadTimeSignature):
    """Signature timestamp is older than the required ``max_age``.
    This is a subclass of :exc:`BadTimeSignature` so you can use the
    baseclass for catching the error.
    """
def base64_encode(string):
    """URL-safe base64 encode a single bytestring (tolerant to getting
    called with a unicode string). The ``=`` padding is stripped so the
    resulting bytestring is safe for putting into URLs.
    """
    raw = want_bytes(string)
    encoded = base64.urlsafe_b64encode(raw)
    return encoded.strip(b'=')
def base64_decode(string):
    """URL-safe base64 decode a single bytestring (tolerant to getting
    called with a unicode string). Missing ``=`` padding is restored
    before decoding. The result is also a bytestring.
    """
    raw = want_bytes(string, encoding='ascii', errors='ignore')
    padding = b'=' * (-len(raw) % 4)
    return base64.urlsafe_b64decode(raw + padding)
def int_to_bytes(num):
    """Serialize a non-negative integer to its shortest big-endian
    byte representation (empty bytes for zero)."""
    assert num >= 0
    chunks = []
    while num:
        chunks.append(int_to_byte(num & 0xff))
        num >>= 8
    chunks.reverse()
    return b''.join(chunks)
def bytes_to_int(bytestr):
    """Deserialize a big-endian byte string into an integer (inverse
    of :func:`int_to_bytes`); empty input yields zero."""
    value = 0
    for octet in bytearray(bytestr):
        value = (value << 8) | octet
    return value
class SigningAlgorithm(object):
    """Base class for signing algorithms; subclasses have to implement
    :meth:`get_signature` to provide signature generation
    functionality.
    """

    def get_signature(self, key, value):
        """Returns the signature for the given key and value."""
        raise NotImplementedError()

    def verify_signature(self, key, value, sig):
        """Verifies the given signature matches the expected signature."""
        expected = self.get_signature(key, value)
        return constant_time_compare(sig, expected)
class NoneAlgorithm(SigningAlgorithm):
    """Algorithm that does not perform any signing and returns an
    empty signature.
    """

    def get_signature(self, key, value):
        # Nothing is signed; the signature is always empty.
        return b''
class HMACAlgorithm(SigningAlgorithm):
    """Provides signature generation using HMACs."""

    #: The digest method to use with the MAC algorithm. This defaults
    #: to sha1 but can be changed to any other function in the hashlib
    #: module.
    default_digest_method = staticmethod(hashlib.sha1)

    def __init__(self, digest_method=None):
        self.digest_method = (self.default_digest_method
                              if digest_method is None else digest_method)

    def get_signature(self, key, value):
        mac = hmac.new(key, msg=value, digestmod=self.digest_method)
        return mac.digest()
class Signer(object):
    """Signs bytes and unsigns them again, validating the attached
    signature.

    Salt can be used to namespace the hash, so that a signed string is
    only valid for a given namespace. Leaving this at the default value
    or re-using a salt value across different parts of your application
    where the same signed value in one part can mean something
    different in another part is a security risk.

    See :ref:`the-salt` for an example of what the salt is doing and
    how you can utilize it.
    .. versionadded:: 0.14
        `key_derivation` and `digest_method` were added as arguments to
        the class constructor.
    .. versionadded:: 0.18
        `algorithm` was added as an argument to the class constructor.
    """

    #: The digest method to use for the signer. This defaults to sha1
    #: but can be changed for any other function in the hashlib module.
    #:
    #: .. versionchanged:: 0.14
    default_digest_method = staticmethod(hashlib.sha1)
    #: Controls how the key is derived. The default is Django style
    #: concatenation. Possible values are ``concat``, ``django-concat``
    #: and ``hmac``. This is used for deriving a key from the secret
    #: key with an added salt.
    #:
    #: .. versionadded:: 0.14
    default_key_derivation = 'django-concat'

    def __init__(self, secret_key, salt=None, sep='.', key_derivation=None,
                 digest_method=None, algorithm=None):
        self.secret_key = want_bytes(secret_key)
        self.sep = sep
        self.salt = 'itsdangerous.Signer' if salt is None else salt
        self.key_derivation = (self.default_key_derivation
                               if key_derivation is None else key_derivation)
        self.digest_method = (self.default_digest_method
                              if digest_method is None else digest_method)
        self.algorithm = (HMACAlgorithm(self.digest_method)
                          if algorithm is None else algorithm)

    def derive_key(self):
        """This method is called to derive the key. If you're unhappy
        with the default key derivation choices you can override them
        here. Keep in mind that the key derivation in itsdangerous is
        not intended to be used as a security method to make a complex
        key out of a short password. Instead you should use large
        random secret keys.
        """
        salt = want_bytes(self.salt)
        method = self.key_derivation
        if method == 'concat':
            return self.digest_method(salt + self.secret_key).digest()
        if method == 'django-concat':
            return self.digest_method(
                salt + b'signer' + self.secret_key).digest()
        if method == 'hmac':
            mac = hmac.new(self.secret_key, digestmod=self.digest_method)
            mac.update(salt)
            return mac.digest()
        if method == 'none':
            return self.secret_key
        raise TypeError('Unknown key derivation method')

    def get_signature(self, value):
        """Returns the signature for the given value."""
        raw = want_bytes(value)
        key = self.derive_key()
        sig = self.algorithm.get_signature(key, raw)
        return base64_encode(sig)

    def sign(self, value):
        """Signs the given string."""
        sep = want_bytes(self.sep)
        return value + sep + self.get_signature(value)

    def verify_signature(self, value, sig):
        """Verifies the signature for the given value."""
        key = self.derive_key()
        try:
            decoded_sig = base64_decode(sig)
        except Exception:
            # Not even valid base64 -- cannot possibly match.
            return False
        return self.algorithm.verify_signature(key, value, decoded_sig)

    def unsign(self, signed_value):
        """Unsigns the given string."""
        signed_value = want_bytes(signed_value)
        sep = want_bytes(self.sep)
        if sep not in signed_value:
            raise BadSignature('No %r found in value' % self.sep)
        value, sig = signed_value.rsplit(sep, 1)
        if not self.verify_signature(value, sig):
            raise BadSignature('Signature %r does not match' % sig,
                               payload=value)
        return value

    def validate(self, signed_value):
        """Just validates the given signed value. Returns `True` if the
        signature exists and is valid, `False` otherwise."""
        try:
            self.unsign(signed_value)
        except BadSignature:
            return False
        return True
class TimestampSigner(Signer):
    """Works like the regular :class:`Signer` but also records the time
    of the signing and can be used to expire signatures. The unsign
    method can raise a :exc:`SignatureExpired` exception if the
    unsigning failed because the signature is expired. This exception
    is a subclass of :exc:`BadSignature`.
    """
    def get_timestamp(self):
        """Returns the current timestamp. This implementation returns the
        seconds since 1/1/2011. The function must return an integer.
        """
        return int(time.time() - EPOCH)
    def timestamp_to_datetime(self, ts):
        """Used to convert the timestamp from `get_timestamp` into a
        datetime object.
        """
        return datetime.utcfromtimestamp(ts + EPOCH)
    def sign(self, value):
        """Signs the given string and also attaches time information."""
        value = want_bytes(value)
        timestamp = base64_encode(int_to_bytes(self.get_timestamp()))
        sep = want_bytes(self.sep)
        # Signed layout: <value><sep><timestamp><sep><signature>
        value = value + sep + timestamp
        return value + sep + self.get_signature(value)
    def unsign(self, value, max_age=None, return_timestamp=False):
        """Works like the regular :meth:`~Signer.unsign` but can also
        validate the time. See the base docstring of the class for
        the general behavior. If `return_timestamp` is set to `True`
        the timestamp of the signature will be returned as a naive
        :class:`datetime.datetime` object in UTC.

        :param max_age: maximum allowed age of the signature in
            seconds; older signatures raise :exc:`SignatureExpired`.
        :param return_timestamp: also return the signing time.
        """
        try:
            result = Signer.unsign(self, value)
            sig_error = None
        except BadSignature as e:
            # Remember the failure but keep going so the eventual error
            # can carry the payload with the timestamp split off.
            sig_error = e
            result = e.payload or b''
        sep = want_bytes(self.sep)
        # If there is no timestamp in the result there is something
        # seriously wrong. In case there was a signature error, we raise
        # that one directly, otherwise we have a weird situation in which
        # we shouldn't have come except someone uses a time-based serializer
        # on non-timestamp data, so catch that.
        if sep not in result:
            if sig_error:
                raise sig_error
            raise BadTimeSignature('timestamp missing', payload=result)
        value, timestamp = result.rsplit(sep, 1)
        try:
            timestamp = bytes_to_int(base64_decode(timestamp))
        except Exception:
            timestamp = None
        # Signature is *not* okay. Raise a proper error now that we have
        # split the value and the timestamp.
        if sig_error is not None:
            raise BadTimeSignature(text_type(sig_error), payload=value,
                                   date_signed=timestamp)
        # Signature was okay but the timestamp is actually not there or
        # malformed. Should not happen, but we handle it nonetheless.
        if timestamp is None:
            raise BadTimeSignature('Malformed timestamp', payload=value)
        # Check timestamp is not older than max_age.
        if max_age is not None:
            age = self.get_timestamp() - timestamp
            if age > max_age:
                raise SignatureExpired(
                    'Signature age %s > %s seconds' % (age, max_age),
                    payload=value,
                    date_signed=self.timestamp_to_datetime(timestamp))
        if return_timestamp:
            return value, self.timestamp_to_datetime(timestamp)
        return value
    def validate(self, signed_value, max_age=None):
        """Just validates the given signed value. Returns `True` if the
        signature exists and is valid, `False` otherwise.

        :param max_age: forwarded to :meth:`unsign`.
        """
        try:
            self.unsign(signed_value, max_age=max_age)
            return True
        except BadSignature:
            return False
class Serializer(object):
    """This class provides a serialization interface on top of the
    signer. It provides a similar API to json/pickle and other modules but is
    slightly differently structured internally. If you want to change the
    underlying implementation for parsing and loading you have to override the
    :meth:`load_payload` and :meth:`dump_payload` functions.
    This implementation uses simplejson if available for dumping and loading
    and will fall back to the standard library's json module if it's not
    available.
    Starting with 0.14 you do not need to subclass this class in order to
    switch out or customize the :class:`Signer`. You can instead also pass a
    different class to the constructor as well as keyword arguments as
    a dictionary that should be forwarded::
        s = Serializer(signer_kwargs={'key_derivation': 'hmac'})
    .. versionchanged:: 0.14
        The `signer` and `signer_kwargs` parameters were added to the
        constructor.
    """
    #: If a serializer module or class is not passed to the constructor
    #: this one is picked up. This currently defaults to :mod:`json`.
    default_serializer = json
    #: The default :class:`Signer` class that is being used by this
    #: serializer.
    #:
    #: .. versionadded:: 0.14
    default_signer = Signer
    def __init__(self, secret_key, salt=b'itsdangerous', serializer=None,
                 signer=None, signer_kwargs=None):
        self.secret_key = want_bytes(secret_key)
        self.salt = want_bytes(salt)
        if serializer is None:
            serializer = self.default_serializer
        self.serializer = serializer
        # Remember whether dumps() produces text so signed results can
        # be returned in the matching (text/bytes) form.
        self.is_text_serializer = is_text_serializer(serializer)
        if signer is None:
            signer = self.default_signer
        self.signer = signer
        self.signer_kwargs = signer_kwargs or {}
    def load_payload(self, payload, serializer=None):
        """Loads the encoded object. This function raises :class:`BadPayload`
        if the payload is not valid. The `serializer` parameter can be used to
        override the serializer stored on the class. The encoded payload is
        always byte based.
        """
        if serializer is None:
            serializer = self.serializer
            is_text = self.is_text_serializer
        else:
            is_text = is_text_serializer(serializer)
        try:
            if is_text:
                payload = payload.decode('utf-8')
            return serializer.loads(payload)
        except Exception as e:
            # Any failure while unserializing is wrapped so callers can
            # inspect the original error.
            raise BadPayload('Could not load the payload because an '
                'exception occurred on unserializing the data',
                original_error=e)
    def dump_payload(self, obj):
        """Dumps the encoded object. The return value is always a
        bytestring. If the internal serializer is text based the value
        will automatically be encoded to utf-8.
        """
        return want_bytes(self.serializer.dumps(obj))
    def make_signer(self, salt=None):
        """A method that creates a new instance of the signer to be used.
        The default implementation uses the :class:`Signer` baseclass.
        """
        if salt is None:
            salt = self.salt
        return self.signer(self.secret_key, salt=salt, **self.signer_kwargs)
    def dumps(self, obj, salt=None):
        """Returns a signed string serialized with the internal serializer.
        The return value can be either a byte or unicode string depending
        on the format of the internal serializer.
        """
        payload = want_bytes(self.dump_payload(obj))
        rv = self.make_signer(salt).sign(payload)
        if self.is_text_serializer:
            rv = rv.decode('utf-8')
        return rv
    def dump(self, obj, f, salt=None):
        """Like :meth:`dumps` but dumps into a file. The file handle has
        to be compatible with what the internal serializer expects.
        """
        f.write(self.dumps(obj, salt))
    def loads(self, s, salt=None):
        """Reverse of :meth:`dumps`, raises :exc:`BadSignature` if the
        signature validation fails.
        """
        s = want_bytes(s)
        return self.load_payload(self.make_signer(salt).unsign(s))
    def load(self, f, salt=None):
        """Like :meth:`loads` but loads from a file."""
        return self.loads(f.read(), salt)
    def loads_unsafe(self, s, salt=None):
        """Like :meth:`loads` but without verifying the signature. This is
        potentially very dangerous to use depending on how your serializer
        works. The return value is ``(signature_okay, payload)`` instead of
        just the payload. The first item will be a boolean that indicates
        if the signature is okay (``True``) or if it failed. This function
        never fails.
        Use it for debugging only and if you know that your serializer module
        is not exploitable (eg: do not use it with a pickle serializer).
        .. versionadded:: 0.15
        """
        return self._loads_unsafe_impl(s, salt)
    def _loads_unsafe_impl(self, s, salt, load_kwargs=None,
                           load_payload_kwargs=None):
        """Lowlevel helper function to implement :meth:`loads_unsafe` in
        serializer subclasses.
        """
        try:
            return True, self.loads(s, salt=salt, **(load_kwargs or {}))
        except BadSignature as e:
            # Signature failed: fall back to parsing the (untrusted)
            # payload carried on the exception, if any, so the caller
            # can still inspect it.
            if e.payload is None:
                return False, None
            try:
                return False, self.load_payload(e.payload,
                    **(load_payload_kwargs or {}))
            except BadPayload:
                return False, None
    def load_unsafe(self, f, *args, **kwargs):
        """Like :meth:`loads_unsafe` but loads from a file.
        .. versionadded:: 0.15
        """
        return self.loads_unsafe(f.read(), *args, **kwargs)
class TimedSerializer(Serializer):
    """Uses the :class:`TimestampSigner` instead of the default
    :class:`Signer`.
    """

    default_signer = TimestampSigner

    def loads(self, s, max_age=None, return_timestamp=False, salt=None):
        """Reverse of :meth:`dumps`, raises :exc:`BadSignature` if the
        signature validation fails. If a `max_age` is provided it will
        ensure the signature is not older than that time in seconds. In
        case the signature is outdated, :exc:`SignatureExpired` is
        raised which is a subclass of :exc:`BadSignature`. All
        arguments are forwarded to the signer's
        :meth:`~TimestampSigner.unsign` method.
        """
        signer = self.make_signer(salt)
        base64d, timestamp = signer.unsign(s, max_age,
                                           return_timestamp=True)
        payload = self.load_payload(base64d)
        return (payload, timestamp) if return_timestamp else payload

    def loads_unsafe(self, s, max_age=None, salt=None):
        """Like :meth:`loads` but without verifying the signature;
        see :meth:`Serializer.loads_unsafe`."""
        return self._loads_unsafe_impl(
            s, salt,
            load_kwargs={'max_age': max_age},
            load_payload_kwargs={})
class JSONWebSignatureSerializer(Serializer):
    """This serializer implements JSON Web Signature (JWS) support. Only
    supports the JWS Compact Serialization.
    """
    #: Supported signing algorithms, keyed by JWS ``alg`` name.
    jws_algorithms = {
        'HS256': HMACAlgorithm(hashlib.sha256),
        'HS384': HMACAlgorithm(hashlib.sha384),
        'HS512': HMACAlgorithm(hashlib.sha512),
        'none': NoneAlgorithm(),
    }
    #: The default algorithm to use for signature generation
    default_algorithm = 'HS256'
    default_serializer = compact_json
    def __init__(self, secret_key, salt=None, serializer=None,
                 signer=None, signer_kwargs=None, algorithm_name=None):
        Serializer.__init__(self, secret_key, salt, serializer,
                            signer, signer_kwargs)
        if algorithm_name is None:
            algorithm_name = self.default_algorithm
        self.algorithm_name = algorithm_name
        self.algorithm = self.make_algorithm(algorithm_name)
    def load_payload(self, payload, return_header=False):
        """Split a ``base64(header).base64(payload)`` compact JWS value
        and unserialize both parts; raises :exc:`BadHeader` or
        :exc:`BadPayload` on malformed input."""
        payload = want_bytes(payload)
        if b'.' not in payload:
            raise BadPayload('No "." found in value')
        base64d_header, base64d_payload = payload.split(b'.', 1)
        try:
            json_header = base64_decode(base64d_header)
        except Exception as e:
            raise BadHeader('Could not base64 decode the header because of '
                            'an exception', original_error=e)
        try:
            json_payload = base64_decode(base64d_payload)
        except Exception as e:
            raise BadPayload('Could not base64 decode the payload because of '
                             'an exception', original_error=e)
        try:
            # The header is always parsed with the plain json module,
            # regardless of the configured payload serializer.
            header = Serializer.load_payload(self, json_header,
                                             serializer=json)
        except BadData as e:
            raise BadHeader('Could not unserialize header because it was '
                            'malformed', original_error=e)
        if not isinstance(header, dict):
            raise BadHeader('Header payload is not a JSON object',
                            header=header)
        payload = Serializer.load_payload(self, json_payload)
        if return_header:
            return payload, header
        return payload
    def dump_payload(self, header, obj):
        # Compact JWS: base64(header) '.' base64(payload).
        base64d_header = base64_encode(self.serializer.dumps(header))
        base64d_payload = base64_encode(self.serializer.dumps(obj))
        return base64d_header + b'.' + base64d_payload
    def make_algorithm(self, algorithm_name):
        """Look up the signing algorithm for the given ``alg`` name."""
        try:
            return self.jws_algorithms[algorithm_name]
        except KeyError:
            raise NotImplementedError('Algorithm not supported')
    def make_signer(self, salt=None, algorithm=None):
        if salt is None:
            salt = self.salt
        # When the instance salt is also None, skip key derivation
        # ('none'); otherwise let the signer apply its default.
        key_derivation = 'none' if salt is None else None
        if algorithm is None:
            algorithm = self.algorithm
        return self.signer(self.secret_key, salt=salt, sep='.',
                           key_derivation=key_derivation, algorithm=algorithm)
    def make_header(self, header_fields):
        """Build the JWS header dict, forcing ``alg`` to this
        serializer's algorithm name."""
        header = header_fields.copy() if header_fields else {}
        header['alg'] = self.algorithm_name
        return header
    def dumps(self, obj, salt=None, header_fields=None):
        """Like :meth:`~Serializer.dumps` but creates a JSON Web Signature. It
        also allows for specifying additional fields to be included in the JWS
        Header.
        """
        header = self.make_header(header_fields)
        signer = self.make_signer(salt, self.algorithm)
        return signer.sign(self.dump_payload(header, obj))
    def loads(self, s, salt=None, return_header=False):
        """Reverse of :meth:`dumps`. If requested via `return_header` it will
        return a tuple of payload and header.
        """
        payload, header = self.load_payload(
            self.make_signer(salt, self.algorithm).unsign(want_bytes(s)),
            return_header=True)
        # Reject tokens whose declared algorithm differs from ours
        # (e.g. a 'none'-signed token presented to an HS256 verifier).
        if header.get('alg') != self.algorithm_name:
            raise BadHeader('Algorithm mismatch', header=header,
                            payload=payload)
        if return_header:
            return payload, header
        return payload
    def loads_unsafe(self, s, salt=None, return_header=False):
        kwargs = {'return_header': return_header}
        return self._loads_unsafe_impl(s, salt, kwargs, kwargs)
class TimedJSONWebSignatureSerializer(JSONWebSignatureSerializer):
    """Works like the regular :class:`JSONWebSignatureSerializer` but
    also records the time of the signing and can be used to expire
    signatures.

    JWS currently does not specify this behavior but it mentions a
    possible extension like this in the spec. The expiry date is
    encoded into the header similarly as specified in
    `draft-ietf-oauth-json-web-token
    <http://self-issued.info/docs/draft-ietf-oauth-json-web-token.html#expDef>`_.
    The unsign method can raise a :exc:`SignatureExpired` exception if
    the unsigning failed because the signature is expired. This
    exception is a subclass of :exc:`BadSignature`.
    """

    #: Default validity window, in seconds (one hour).
    DEFAULT_EXPIRES_IN = 3600

    def __init__(self, secret_key, expires_in=None, **kwargs):
        JSONWebSignatureSerializer.__init__(self, secret_key, **kwargs)
        self.expires_in = (self.DEFAULT_EXPIRES_IN
                           if expires_in is None else expires_in)

    def make_header(self, header_fields):
        """Extend the JWS header with ``iat`` (issued at) and ``exp``
        (expiry) integer timestamps."""
        header = JSONWebSignatureSerializer.make_header(self, header_fields)
        issued_at = self.now()
        header['iat'] = issued_at
        header['exp'] = issued_at + self.expires_in
        return header

    def loads(self, s, salt=None, return_header=False):
        """Reverse of :meth:`dumps`; additionally validates the ``exp``
        header and raises :exc:`SignatureExpired` once it has passed."""
        payload, header = JSONWebSignatureSerializer.loads(
            self, s, salt, return_header=True)
        if 'exp' not in header:
            raise BadSignature('Missing expiry date', payload=payload)
        exp_is_int_date = (isinstance(header['exp'], number_types)
                           and header['exp'] > 0)
        if not exp_is_int_date:
            raise BadSignature('expiry date is not an IntDate',
                               payload=payload)
        if header['exp'] < self.now():
            raise SignatureExpired('Signature expired', payload=payload,
                                   date_signed=self.get_issue_date(header))
        return (payload, header) if return_header else payload

    def get_issue_date(self, header):
        """Return the ``iat`` header as a naive UTC datetime, or
        ``None`` when it is missing or not numeric."""
        issued_at = header.get('iat')
        if isinstance(issued_at, number_types):
            return datetime.utcfromtimestamp(int(issued_at))
        return None

    def now(self):
        """Current wall-clock time as an integer Unix timestamp."""
        return int(time.time())
class URLSafeSerializerMixin(object):
    """Mixed in with a regular serializer it will attempt to zlib compress
    the string to make it shorter if necessary.  It will also base64 encode
    the string so that it can safely be placed in a URL.
    """

    def load_payload(self, payload):
        # A leading ``.`` marks a payload that dump_payload() zlib
        # compressed before base64 encoding it.
        decompress = False
        if payload.startswith(b'.'):
            payload = payload[1:]
            decompress = True
        try:
            json = base64_decode(payload)
        except Exception as e:
            raise BadPayload('Could not base64 decode the payload because of '
                             'an exception', original_error=e)
        if decompress:
            try:
                json = zlib.decompress(json)
            except Exception as e:
                raise BadPayload('Could not zlib decompress the payload before '
                                 'decoding the payload', original_error=e)
        return super(URLSafeSerializerMixin, self).load_payload(json)

    def dump_payload(self, obj):
        json = super(URLSafeSerializerMixin, self).dump_payload(obj)
        is_compressed = False
        # Only keep the compressed form when it is actually smaller; the
        # ``.`` marker itself costs one extra byte, hence ``len(json) - 1``.
        compressed = zlib.compress(json)
        if len(compressed) < (len(json) - 1):
            json = compressed
            is_compressed = True
        base64d = base64_encode(json)
        if is_compressed:
            base64d = b'.' + base64d
        return base64d
class URLSafeSerializer(URLSafeSerializerMixin, Serializer):
    """Works like :class:`Serializer` but dumps and loads into a URL
    safe string consisting of the upper and lowercase character of the
    alphabet as well as ``'_'``, ``'-'`` and ``'.'``.
    """

    # Compact JSON (no extra whitespace) keeps the resulting URLs short.
    default_serializer = compact_json
class URLSafeTimedSerializer(URLSafeSerializerMixin, TimedSerializer):
    """Works like :class:`TimedSerializer` but dumps and loads into a URL
    safe string consisting of the upper and lowercase character of the
    alphabet as well as ``'_'``, ``'-'`` and ``'.'``.
    """

    # Compact JSON (no extra whitespace) keeps the resulting URLs short.
    default_serializer = compact_json
Jinja2 is a template engine written in pure Python. It provides a Django inspired non-XML syntax but supports inline expressions and an optional sandboxed environment.
Here is a small example of a Jinja template:
{% extends 'base.html' %} {% block title %}Memberlist{% endblock %} {% block content %} <ul> {% for user in users %} <li><a href="{{ user.url }}">{{ user.username }}</a></li> {% endfor %} </ul> {% endblock %}
Application logic is for the controller, but don't make life too hard for the template designer by giving them too little functionality.
For more information, visit the new Jinja2 webpage and documentation.
[babel.extractors] | |
jinja2 = jinja2.ext:babel_extract[i18n] | |
pip |
Copyright (c) 2009 by the Jinja Team, see AUTHORS for more details. | |
Some rights reserved. | |
Redistribution and use in source and binary forms, with or without | |
modification, are permitted provided that the following conditions are | |
met: | |
* Redistributions of source code must retain the above copyright | |
notice, this list of conditions and the following disclaimer. | |
* Redistributions in binary form must reproduce the above | |
copyright notice, this list of conditions and the following | |
disclaimer in the documentation and/or other materials provided | |
with the distribution. | |
* The names of the contributors may not be used to endorse or | |
promote products derived from this software without specific | |
prior written permission. | |
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | |
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | |
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | |
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | |
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | |
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | |
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | |
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | |
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | |
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | |
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
Metadata-Version: 2.0 | |
Name: Jinja2 | |
Version: 2.10 | |
Summary: A small but fast and easy to use stand-alone template engine written in pure python. | |
Home-page: http://jinja.pocoo.org/ | |
Author: Armin Ronacher | |
Author-email: armin.ronacher@active-4.com | |
License: BSD | |
Description-Content-Type: UNKNOWN | |
Platform: UNKNOWN | |
Classifier: Development Status :: 5 - Production/Stable | |
Classifier: Environment :: Web Environment | |
Classifier: Intended Audience :: Developers | |
Classifier: License :: OSI Approved :: BSD License | |
Classifier: Operating System :: OS Independent | |
Classifier: Programming Language :: Python | |
Classifier: Programming Language :: Python :: 2 | |
Classifier: Programming Language :: Python :: 2.6 | |
Classifier: Programming Language :: Python :: 2.7 | |
Classifier: Programming Language :: Python :: 3 | |
Classifier: Programming Language :: Python :: 3.3 | |
Classifier: Programming Language :: Python :: 3.4 | |
Classifier: Programming Language :: Python :: 3.5 | |
Classifier: Programming Language :: Python :: 3.6 | |
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content | |
Classifier: Topic :: Software Development :: Libraries :: Python Modules | |
Classifier: Topic :: Text Processing :: Markup :: HTML | |
Requires-Dist: MarkupSafe (>=0.23) | |
Provides-Extra: i18n | |
Requires-Dist: Babel (>=0.8); extra == 'i18n' | |
Jinja2 | |
~~~~~~ | |
Jinja2 is a template engine written in pure Python. It provides a | |
`Django`_ inspired non-XML syntax but supports inline expressions and | |
an optional `sandboxed`_ environment. | |
Nutshell | |
-------- | |
Here is a small example of a Jinja template:: | |
{% extends 'base.html' %} | |
{% block title %}Memberlist{% endblock %} | |
{% block content %} | |
<ul> | |
{% for user in users %} | |
<li><a href="{{ user.url }}">{{ user.username }}</a></li> | |
{% endfor %} | |
</ul> | |
{% endblock %} | |
Philosophy | |
---------- | |
Application logic is for the controller, but don't make life too hard | |
for the template designer by giving them too little functionality. | |
For more information, visit the new `Jinja2 webpage`_ and `documentation`_. | |
.. _sandboxed: https://en.wikipedia.org/wiki/Sandbox_(computer_security) | |
.. _Django: https://www.djangoproject.com/ | |
.. _Jinja2 webpage: http://jinja.pocoo.org/ | |
.. _documentation: http://jinja.pocoo.org/2/documentation/ | |
{"classifiers": ["Development Status :: 5 - Production/Stable", "Environment :: Web Environment", "Intended Audience :: Developers", "License :: OSI Approved :: BSD License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Topic :: Internet :: WWW/HTTP :: Dynamic Content", "Topic :: Software Development :: Libraries :: Python Modules", "Topic :: Text Processing :: Markup :: HTML"], "description_content_type": "UNKNOWN", "extensions": {"python.details": {"contacts": [{"email": "armin.ronacher@active-4.com", "name": "Armin Ronacher", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst", "license": "LICENSE.txt"}, "project_urls": {"Home": "http://jinja.pocoo.org/"}}, "python.exports": {"babel.extractors": {"jinja2": "jinja2.ext:babel_extract [i18n]"}}}, "extras": ["i18n"], "generator": "bdist_wheel (0.30.0)", "license": "BSD", "metadata_version": "2.0", "name": "Jinja2", "run_requires": [{"extra": "i18n", "requires": ["Babel (>=0.8)"]}, {"requires": ["MarkupSafe (>=0.23)"]}], "summary": "A small but fast and easy to use stand-alone template engine written in pure python.", "version": "2.10"} |
Jinja2-2.10.dist-info/DESCRIPTION.rst,sha256=b5ckFDoM7vVtz_mAsJD4OPteFKCqE7beu353g4COoYI,978 | |
Jinja2-2.10.dist-info/LICENSE.txt,sha256=JvzUNv3Io51EiWrAPm8d_SXjhJnEjyDYvB3Tvwqqils,1554 | |
Jinja2-2.10.dist-info/METADATA,sha256=18EgU8zR6-av-0-5y_gXebzK4GnBB_76lALUsl-6QHM,2258 | |
Jinja2-2.10.dist-info/RECORD,, | |
Jinja2-2.10.dist-info/WHEEL,sha256=kdsN-5OJAZIiHN-iO4Rhl82KyS0bDWf4uBwMbkNafr8,110 | |
Jinja2-2.10.dist-info/entry_points.txt,sha256=NdzVcOrqyNyKDxD09aERj__3bFx2paZhizFDsKmVhiA,72 | |
Jinja2-2.10.dist-info/metadata.json,sha256=NPUJ9TMBxVQAv_kTJzvU8HwmP-4XZvbK9mz6_4YUVl4,1473 | |
Jinja2-2.10.dist-info/top_level.txt,sha256=PkeVWtLb3-CqjWi1fO29OCbj55EhX_chhKrCdrVe_zs,7 | |
jinja2/__init__.py,sha256=xJHjaMoy51_KXn1wf0cysH6tUUifUxZCwSOfcJGEYZw,2614 | |
jinja2/_compat.py,sha256=xP60CE5Qr8FTYcDE1f54tbZLKGvMwYml4-8T7Q4KG9k,2596 | |
jinja2/_identifier.py,sha256=W1QBSY-iJsyt6oR_nKSuNNCzV95vLIOYgUNPUI1d5gU,1726 | |
jinja2/asyncfilters.py,sha256=cTDPvrS8Hp_IkwsZ1m9af_lr5nHysw7uTa5gV0NmZVE,4144 | |
jinja2/asyncsupport.py,sha256=UErQ3YlTLaSjFb94P4MVn08-aVD9jJxty2JVfMRb-1M,7878 | |
jinja2/bccache.py,sha256=nQldx0ZRYANMyfvOihRoYFKSlUdd5vJkS7BjxNwlOZM,12794 | |
jinja2/compiler.py,sha256=BqC5U6JxObSRhblyT_a6Tp5GtEU5z3US1a4jLQaxxgo,65386 | |
jinja2/constants.py,sha256=uwwV8ZUhHhacAuz5PTwckfsbqBaqM7aKfyJL7kGX5YQ,1626 | |
jinja2/debug.py,sha256=WTVeUFGUa4v6ReCsYv-iVPa3pkNB75OinJt3PfxNdXs,12045 | |
jinja2/defaults.py,sha256=Em-95hmsJxIenDCZFB1YSvf9CNhe9rBmytN3yUrBcWA,1400 | |
jinja2/environment.py,sha256=VnkAkqw8JbjZct4tAyHlpBrka2vqB-Z58RAP-32P1ZY,50849 | |
jinja2/exceptions.py,sha256=_Rj-NVi98Q6AiEjYQOsP8dEIdu5AlmRHzcSNOPdWix4,4428 | |
jinja2/ext.py,sha256=atMQydEC86tN1zUsdQiHw5L5cF62nDbqGue25Yiu3N4,24500 | |
jinja2/filters.py,sha256=yOAJk0MsH-_gEC0i0U6NweVQhbtYaC-uE8xswHFLF4w,36528 | |
jinja2/idtracking.py,sha256=2GbDSzIvGArEBGLkovLkqEfmYxmWsEf8c3QZwM4uNsw,9197 | |
jinja2/lexer.py,sha256=ySEPoXd1g7wRjsuw23uimS6nkGN5aqrYwcOKxCaVMBQ,28559 | |
jinja2/loaders.py,sha256=xiTuURKAEObyym0nU8PCIXu_Qp8fn0AJ5oIADUUm-5Q,17382 | |
jinja2/meta.py,sha256=fmKHxkmZYAOm9QyWWy8EMd6eefAIh234rkBMW2X4ZR8,4340 | |
jinja2/nativetypes.py,sha256=_sJhS8f-8Q0QMIC0dm1YEdLyxEyoO-kch8qOL5xUDfE,7308 | |
jinja2/nodes.py,sha256=L10L_nQDfubLhO3XjpF9qz46FSh2clL-3e49ogVlMmA,30853 | |
jinja2/optimizer.py,sha256=MsdlFACJ0FRdPtjmCAdt7JQ9SGrXFaDNUaslsWQaG3M,1722 | |
jinja2/parser.py,sha256=lPzTEbcpTRBLw8ii6OYyExHeAhaZLMA05Hpv4ll3ULk,35875 | |
jinja2/runtime.py,sha256=DHdD38Pq8gj7uWQC5usJyWFoNWL317A9AvXOW_CLB34,27755 | |
jinja2/sandbox.py,sha256=TVyZHlNqqTzsv9fv2NvJNmSdWRHTguhyMHdxjWms32U,16708 | |
jinja2/tests.py,sha256=iJQLwbapZr-EKquTG_fVOVdwHUUKf3SX9eNkjQDF8oU,4237 | |
jinja2/utils.py,sha256=q24VupGZotQ-uOyrJxCaXtDWhZC1RgsQG7kcdmjck2Q,20629 | |
jinja2/visitor.py,sha256=JD1H1cANA29JcntFfN5fPyqQxB4bI4wC00BzZa-XHks,3316 | |
Jinja2-2.10.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 | |
jinja2/__pycache__/asyncfilters.cpython-37.pyc,, | |
jinja2/__pycache__/asyncsupport.cpython-37.pyc,, | |
jinja2/__pycache__/bccache.cpython-37.pyc,, | |
jinja2/__pycache__/compiler.cpython-37.pyc,, | |
jinja2/__pycache__/constants.cpython-37.pyc,, | |
jinja2/__pycache__/debug.cpython-37.pyc,, | |
jinja2/__pycache__/defaults.cpython-37.pyc,, | |
jinja2/__pycache__/environment.cpython-37.pyc,, | |
jinja2/__pycache__/exceptions.cpython-37.pyc,, | |
jinja2/__pycache__/ext.cpython-37.pyc,, | |
jinja2/__pycache__/filters.cpython-37.pyc,, | |
jinja2/__pycache__/idtracking.cpython-37.pyc,, | |
jinja2/__pycache__/lexer.cpython-37.pyc,, | |
jinja2/__pycache__/loaders.cpython-37.pyc,, | |
jinja2/__pycache__/meta.cpython-37.pyc,, | |
jinja2/__pycache__/nativetypes.cpython-37.pyc,, | |
jinja2/__pycache__/nodes.cpython-37.pyc,, | |
jinja2/__pycache__/optimizer.cpython-37.pyc,, | |
jinja2/__pycache__/parser.cpython-37.pyc,, | |
jinja2/__pycache__/runtime.cpython-37.pyc,, | |
jinja2/__pycache__/sandbox.cpython-37.pyc,, | |
jinja2/__pycache__/tests.cpython-37.pyc,, | |
jinja2/__pycache__/utils.cpython-37.pyc,, | |
jinja2/__pycache__/visitor.cpython-37.pyc,, | |
jinja2/__pycache__/_compat.cpython-37.pyc,, | |
jinja2/__pycache__/_identifier.cpython-37.pyc,, | |
jinja2/__pycache__/__init__.cpython-37.pyc,, |
jinja2 |
Wheel-Version: 1.0 | |
Generator: bdist_wheel (0.30.0) | |
Root-Is-Purelib: true | |
Tag: py2-none-any | |
Tag: py3-none-any | |
# -*- coding: utf-8 -*- | |
""" | |
jinja2 | |
~~~~~~ | |
Jinja2 is a template engine written in pure Python. It provides a | |
Django inspired non-XML syntax but supports inline expressions and | |
an optional sandboxed environment. | |
Nutshell | |
-------- | |
Here a small example of a Jinja2 template:: | |
{% extends 'base.html' %} | |
{% block title %}Memberlist{% endblock %} | |
{% block content %} | |
<ul> | |
{% for user in users %} | |
<li><a href="{{ user.url }}">{{ user.username }}</a></li> | |
{% endfor %} | |
</ul> | |
{% endblock %} | |
:copyright: (c) 2017 by the Jinja Team. | |
:license: BSD, see LICENSE for more details. | |
""" | |
__docformat__ = 'restructuredtext en' | |
__version__ = '2.10' | |
# high level interface | |
from jinja2.environment import Environment, Template | |
# loaders | |
from jinja2.loaders import BaseLoader, FileSystemLoader, PackageLoader, \ | |
DictLoader, FunctionLoader, PrefixLoader, ChoiceLoader, \ | |
ModuleLoader | |
# bytecode caches | |
from jinja2.bccache import BytecodeCache, FileSystemBytecodeCache, \ | |
MemcachedBytecodeCache | |
# undefined types | |
from jinja2.runtime import Undefined, DebugUndefined, StrictUndefined, \ | |
make_logging_undefined | |
# exceptions | |
from jinja2.exceptions import TemplateError, UndefinedError, \ | |
TemplateNotFound, TemplatesNotFound, TemplateSyntaxError, \ | |
TemplateAssertionError, TemplateRuntimeError | |
# decorators and public utilities | |
from jinja2.filters import environmentfilter, contextfilter, \ | |
evalcontextfilter | |
from jinja2.utils import Markup, escape, clear_caches, \ | |
environmentfunction, evalcontextfunction, contextfunction, \ | |
is_undefined, select_autoescape | |
# Public names re-exported at package level; kept in sync with the
# imports above.
__all__ = [
    'Environment', 'Template', 'BaseLoader', 'FileSystemLoader',
    'PackageLoader', 'DictLoader', 'FunctionLoader', 'PrefixLoader',
    'ChoiceLoader', 'BytecodeCache', 'FileSystemBytecodeCache',
    'MemcachedBytecodeCache', 'Undefined', 'DebugUndefined',
    'StrictUndefined', 'TemplateError', 'UndefinedError', 'TemplateNotFound',
    'TemplatesNotFound', 'TemplateSyntaxError', 'TemplateAssertionError',
    'TemplateRuntimeError',
    'ModuleLoader', 'environmentfilter', 'contextfilter', 'Markup', 'escape',
    'environmentfunction', 'contextfunction', 'clear_caches', 'is_undefined',
    'evalcontextfilter', 'evalcontextfunction', 'make_logging_undefined',
    'select_autoescape',
]
def _patch_async():
    """Monkey-patch async rendering support into jinja2 when the running
    interpreter supports async generators."""
    from jinja2.utils import have_async_gen
    if not have_async_gen:
        return
    from jinja2.asyncsupport import patch_all
    patch_all()


# Run once at import time, then remove the helper from the namespace.
_patch_async()
del _patch_async
# -*- coding: utf-8 -*- | |
""" | |
jinja2._compat | |
~~~~~~~~~~~~~~ | |
Some py2/py3 compatibility support based on a stripped down | |
version of six so we don't have to depend on a specific version | |
of it. | |
:copyright: Copyright 2013 by the Jinja team, see AUTHORS. | |
:license: BSD, see LICENSE for details. | |
""" | |
import sys | |
# True when running on a Python 2.x interpreter; drives every branch below.
PY2 = sys.version_info[0] == 2
PYPY = hasattr(sys, 'pypy_translation_info')

# No-op pass-through used where Python 3 needs no wrapping.
_identity = lambda x: x

if not PY2:
    unichr = chr
    range_type = range
    text_type = str
    string_types = (str,)
    integer_types = (int,)

    iterkeys = lambda d: iter(d.keys())
    itervalues = lambda d: iter(d.values())
    iteritems = lambda d: iter(d.items())

    import pickle
    from io import BytesIO, StringIO
    NativeStringIO = StringIO

    def reraise(tp, value, tb=None):
        # Re-raise ``value`` with ``tb`` attached unless it already
        # carries that traceback.
        if value.__traceback__ is not tb:
            raise value.with_traceback(tb)
        raise value

    ifilter = filter
    imap = map
    izip = zip
    intern = sys.intern

    # On Python 3 the iterator/str protocols need no class rewriting.
    implements_iterator = _identity
    implements_to_string = _identity
    encode_filename = _identity
else:
    unichr = unichr
    text_type = unicode
    range_type = xrange
    string_types = (str, unicode)
    integer_types = (int, long)

    iterkeys = lambda d: d.iterkeys()
    itervalues = lambda d: d.itervalues()
    iteritems = lambda d: d.iteritems()

    import cPickle as pickle
    from cStringIO import StringIO as BytesIO, StringIO
    NativeStringIO = BytesIO

    # The Py2-only three-argument raise syntax would be a SyntaxError on
    # Python 3, hence the exec() indirection.
    exec('def reraise(tp, value, tb=None):\n raise tp, value, tb')

    from itertools import imap, izip, ifilter
    intern = intern

    def implements_iterator(cls):
        # Map the Py3-style ``__next__`` onto the Py2 ``next`` protocol.
        cls.next = cls.__next__
        del cls.__next__
        return cls

    def implements_to_string(cls):
        # Use ``__str__`` as ``__unicode__`` and have ``__str__`` emit
        # UTF-8 encoded bytes.
        cls.__unicode__ = cls.__str__
        cls.__str__ = lambda x: x.__unicode__().encode('utf-8')
        return cls

    def encode_filename(filename):
        if isinstance(filename, unicode):
            return filename.encode('utf-8')
        return filename
def with_metaclass(meta, *bases):
    """Create a base class with a metaclass.

    The returned object is a throwaway class whose metaclass intercepts
    the very first subclass creation and constructs the real class with
    ``meta`` and ``bases`` instead, so ``class C(with_metaclass(M, B))``
    ends up equivalent to ``M('C', (B,), ...)``.
    """
    class _proxy(type):
        def __new__(cls, name, this_bases, d):
            # Discard the proxy bases and build the real class directly.
            return meta(name, bases, d)
    return type.__new__(_proxy, 'temporary_class', (), {})
try: | |
from urllib.parse import quote_from_bytes as url_quote | |
except ImportError: | |
from urllib import quote as url_quote |
# generated by scripts/generate_identifier_pattern.py | |
pattern = '·̀-ͯ·҃-֑҇-ׇֽֿׁׂׅׄؐ-ًؚ-ٰٟۖ-ۜ۟-۪ۤۧۨ-ܑۭܰ-݊ަ-ް߫-߳ࠖ-࠙ࠛ-ࠣࠥ-ࠧࠩ-࡙࠭-࡛ࣔ-ࣣ࣡-ःऺ-़ा-ॏ॑-ॗॢॣঁ-ঃ়া-ৄেৈো-্ৗৢৣਁ-ਃ਼ਾ-ੂੇੈੋ-੍ੑੰੱੵઁ-ઃ઼ા-ૅે-ૉો-્ૢૣଁ-ଃ଼ା-ୄେୈୋ-୍ୖୗୢୣஂா-ூெ-ைொ-்ௗఀ-ఃా-ౄె-ైొ-్ౕౖౢౣಁ-ಃ಼ಾ-ೄೆ-ೈೊ-್ೕೖೢೣഁ-ഃാ-ൄെ-ൈൊ-്ൗൢൣංඃ්ා-ුූෘ-ෟෲෳัิ-ฺ็-๎ັິ-ູົຼ່-ໍ༹༘༙༵༷༾༿ཱ-྄྆྇ྍ-ྗྙ-ྼ࿆ါ-ှၖ-ၙၞ-ၠၢ-ၤၧ-ၭၱ-ၴႂ-ႍႏႚ-ႝ፝-፟ᜒ-᜔ᜲ-᜴ᝒᝓᝲᝳ឴-៓៝᠋-᠍ᢅᢆᢩᤠ-ᤫᤰ-᤻ᨗ-ᨛᩕ-ᩞ᩠-᩿᩼᪰-᪽ᬀ-ᬄ᬴-᭄᭫-᭳ᮀ-ᮂᮡ-ᮭ᯦-᯳ᰤ-᰷᳐-᳔᳒-᳨᳭ᳲ-᳴᳸᳹᷀-᷵᷻-᷿‿⁀⁔⃐-⃥⃜⃡-⃰℘℮⳯-⵿⳱ⷠ-〪ⷿ-゙゚〯꙯ꙴ-꙽ꚞꚟ꛰꛱ꠂ꠆ꠋꠣ-ꠧꢀꢁꢴ-ꣅ꣠-꣱ꤦ-꤭ꥇ-꥓ꦀ-ꦃ꦳-꧀ꧥꨩ-ꨶꩃꩌꩍꩻ-ꩽꪰꪲ-ꪴꪷꪸꪾ꪿꫁ꫫ-ꫯꫵ꫶ꯣ-ꯪ꯬꯭ﬞ︀-️︠-︯︳︴﹍-﹏_𐇽𐋠𐍶-𐍺𐨁-𐨃𐨅𐨆𐨌-𐨏𐨸-𐨿𐨺𐫦𐫥𑀀-𑀂𑀸-𑁆𑁿-𑂂𑂰-𑂺𑄀-𑄂𑄧-𑅳𑄴𑆀-𑆂𑆳-𑇊𑇀-𑇌𑈬-𑈷𑈾𑋟-𑋪𑌀-𑌃𑌼𑌾-𑍄𑍇𑍈𑍋-𑍍𑍗𑍢𑍣𑍦-𑍬𑍰-𑍴𑐵-𑑆𑒰-𑓃𑖯-𑖵𑖸-𑗀𑗜𑗝𑘰-𑙀𑚫-𑚷𑜝-𑜫𑰯-𑰶𑰸-𑰿𑲒-𑲧𑲩-𑲶𖫰-𖫴𖬰-𖬶𖽑-𖽾𖾏-𖾒𛲝𛲞𝅥-𝅩𝅭-𝅲𝅻-𝆂𝆅-𝆋𝆪-𝆭𝉂-𝉄𝨀-𝨶𝨻-𝩬𝩵𝪄𝪛-𝪟𝪡-𝪯𞀀-𞀆𞀈-𞀘𞀛-𞀡𞀣𞀤𞀦-𞣐𞀪-𞣖𞥄-𞥊󠄀-󠇯' |
from functools import wraps | |
from jinja2.asyncsupport import auto_aiter | |
from jinja2 import filters | |
async def auto_to_seq(value):
    """Materialize ``value`` — an async or plain iterable — into a list."""
    if hasattr(value, '__aiter__'):
        return [item async for item in value]
    return list(value)
async def async_select_or_reject(args, kwargs, modfunc, lookup_attr):
    # Shared async driver for select/reject(/attr):
    # ``prepare_select_or_reject`` resolves the sequence and per-item
    # test; ``modfunc`` negates the test for the reject variants.
    seq, func = filters.prepare_select_or_reject(
        args, kwargs, modfunc, lookup_attr)
    if seq:
        async for item in auto_aiter(seq):
            if func(item):
                yield item
def dualfilter(normal_filter, async_filter):
    # Build a wrapper that dispatches between the sync and async filter
    # implementation at call time, based on the environment's async flag.
    wrap_evalctx = False
    if getattr(normal_filter, 'environmentfilter', False):
        # Environment filters already receive the environment as their
        # first argument, so the flag can be read from it directly.
        is_async = lambda args: args[0].is_async
        wrap_evalctx = False
    else:
        if not getattr(normal_filter, 'evalcontextfilter', False) and \
           not getattr(normal_filter, 'contextfilter', False):
            # Plain filters are promoted to eval-context filters so the
            # wrapper can see the environment; the extra first argument
            # is stripped again before delegating below.
            wrap_evalctx = True
        is_async = lambda args: args[0].environment.is_async

    @wraps(normal_filter)
    def wrapper(*args, **kwargs):
        b = is_async(args)
        if wrap_evalctx:
            args = args[1:]
        if b:
            return async_filter(*args, **kwargs)
        return normal_filter(*args, **kwargs)

    if wrap_evalctx:
        wrapper.evalcontextfilter = True

    # Marker consumed elsewhere to recognize dual filters.
    wrapper.asyncfiltervariant = True
    return wrapper
def asyncfiltervariant(original):
    """Decorator factory: registers the decorated coroutine as the async
    twin of the synchronous filter ``original`` via :func:`dualfilter`."""
    return lambda f: dualfilter(original, f)
@asyncfiltervariant(filters.do_first)
async def do_first(environment, seq):
    # Async ``first``: pull a single item from the (possibly async)
    # iterable, or return an undefined object when it is empty.
    try:
        return await auto_aiter(seq).__anext__()
    except StopAsyncIteration:
        return environment.undefined('No first item, sequence was empty.')


@asyncfiltervariant(filters.do_groupby)
async def do_groupby(environment, value, attribute):
    # Materialize the async input, then reuse the sync groupby machinery.
    expr = filters.make_attrgetter(environment, attribute)
    return [filters._GroupTuple(key, await auto_to_seq(values))
            for key, values in filters.groupby(sorted(
                await auto_to_seq(value), key=expr), expr)]


@asyncfiltervariant(filters.do_join)
async def do_join(eval_ctx, value, d=u'', attribute=None):
    # Drain the async iterable, then delegate to the sync join filter.
    return filters.do_join(eval_ctx, await auto_to_seq(value), d, attribute)


@asyncfiltervariant(filters.do_list)
async def do_list(value):
    return await auto_to_seq(value)


@asyncfiltervariant(filters.do_reject)
async def do_reject(*args, **kwargs):
    return async_select_or_reject(args, kwargs, lambda x: not x, False)


@asyncfiltervariant(filters.do_rejectattr)
async def do_rejectattr(*args, **kwargs):
    return async_select_or_reject(args, kwargs, lambda x: not x, True)


@asyncfiltervariant(filters.do_select)
async def do_select(*args, **kwargs):
    return async_select_or_reject(args, kwargs, lambda x: x, False)


@asyncfiltervariant(filters.do_selectattr)
async def do_selectattr(*args, **kwargs):
    return async_select_or_reject(args, kwargs, lambda x: x, True)


@asyncfiltervariant(filters.do_map)
async def do_map(*args, **kwargs):
    # Stream mapped items lazily instead of materializing the sequence.
    seq, func = filters.prepare_map(args, kwargs)
    if seq:
        async for item in auto_aiter(seq):
            yield func(item)


@asyncfiltervariant(filters.do_sum)
async def do_sum(environment, iterable, attribute=None, start=0):
    rv = start
    if attribute is not None:
        func = filters.make_attrgetter(environment, attribute)
    else:
        # No attribute requested: sum the items themselves.
        func = lambda x: x
    async for item in auto_aiter(iterable):
        rv += func(item)
    return rv


@asyncfiltervariant(filters.do_slice)
async def do_slice(value, slices, fill_with=None):
    return filters.do_slice(await auto_to_seq(value), slices, fill_with)
# Name -> implementation map merged into jinja2.filters.FILTERS by
# jinja2.asyncsupport.patch_filters().
ASYNC_FILTERS = {
    'first': do_first,
    'groupby': do_groupby,
    'join': do_join,
    'list': do_list,
    # we intentionally do not support do_last because that would be
    # ridiculous
    'reject': do_reject,
    'rejectattr': do_rejectattr,
    'map': do_map,
    'select': do_select,
    'selectattr': do_selectattr,
    'sum': do_sum,
    'slice': do_slice,
}
# -*- coding: utf-8 -*- | |
""" | |
jinja2.asyncsupport | |
~~~~~~~~~~~~~~~~~~~ | |
Has all the code for async support which is implemented as a patch | |
for supported Python versions. | |
:copyright: (c) 2017 by the Jinja Team. | |
:license: BSD, see LICENSE for more details. | |
""" | |
import sys | |
import asyncio | |
import inspect | |
from functools import update_wrapper | |
from jinja2.utils import concat, internalcode, Markup | |
from jinja2.environment import TemplateModule | |
from jinja2.runtime import LoopContextBase, _last_iteration | |
async def concat_async(async_gen):
    """Drain ``async_gen`` completely and join the collected events with
    :func:`concat`."""
    events = []
    async for event in async_gen:
        events.append(event)
    return concat(events)
async def generate_async(self, *args, **kwargs):
    # Async counterpart of Template.generate: stream rendered events.
    vars = dict(*args, **kwargs)
    try:
        async for event in self.root_render_func(self.new_context(vars)):
            yield event
    except Exception:
        exc_info = sys.exc_info()
    else:
        return
    # Only reached on error: yield whatever the environment's exception
    # handler produces (it may also re-raise).
    yield self.environment.handle_exception(exc_info, True)
def wrap_generate_func(original_generate):
    # Replacement for Template.generate: in async environments the async
    # generator is driven step-by-step from a synchronous generator.
    def _convert_generator(self, loop, args, kwargs):
        async_gen = self.generate_async(*args, **kwargs)
        try:
            while 1:
                # Block on the event loop for each event to satisfy the
                # synchronous generator protocol.
                yield loop.run_until_complete(async_gen.__anext__())
        except StopAsyncIteration:
            pass

    def generate(self, *args, **kwargs):
        if not self.environment.is_async:
            return original_generate(self, *args, **kwargs)
        return _convert_generator(self, asyncio.get_event_loop(), args, kwargs)
    return update_wrapper(generate, original_generate)
async def render_async(self, *args, **kwargs):
    # Async counterpart of Template.render; only valid on environments
    # created with async mode enabled.
    if not self.environment.is_async:
        raise RuntimeError('The environment was not created with async mode '
                           'enabled.')
    vars = dict(*args, **kwargs)
    ctx = self.new_context(vars)
    try:
        return await concat_async(self.root_render_func(ctx))
    except Exception:
        exc_info = sys.exc_info()
        return self.environment.handle_exception(exc_info, True)
def wrap_render_func(original_render):
    """Replacement for ``Template.render`` that routes async environments
    through :func:`render_async` on the current event loop."""
    def render(self, *args, **kwargs):
        if self.environment.is_async:
            loop = asyncio.get_event_loop()
            return loop.run_until_complete(self.render_async(*args, **kwargs))
        return original_render(self, *args, **kwargs)
    return update_wrapper(render, original_render)
def wrap_block_reference_call(original_call):
    # Replacement for BlockReference.__call__ that renders the block
    # asynchronously when the environment is async.
    @internalcode
    async def async_call(self):
        rv = await concat_async(self._stack[self._depth](self._context))
        if self._context.eval_ctx.autoescape:
            rv = Markup(rv)
        return rv

    @internalcode
    def __call__(self):
        if not self._context.environment.is_async:
            return original_call(self)
        # Returns a coroutine; the compiled template awaits it.
        return async_call(self)
    return update_wrapper(__call__, original_call)
def wrap_macro_invoke(original_invoke):
    # Replacement for Macro._invoke that awaits the macro body when the
    # environment is async.
    @internalcode
    async def async_invoke(self, arguments, autoescape):
        rv = await self._func(*arguments)
        if autoescape:
            rv = Markup(rv)
        return rv

    @internalcode
    def _invoke(self, arguments, autoescape):
        if not self._environment.is_async:
            return original_invoke(self, arguments, autoescape)
        # Returns a coroutine in async mode.
        return async_invoke(self, arguments, autoescape)
    return update_wrapper(_invoke, original_invoke)
@internalcode
async def get_default_module_async(self):
    # Lazily build and cache the template's module object (async path).
    if self._module is not None:
        return self._module
    self._module = rv = await self.make_module_async()
    return rv
def wrap_default_module(original_default_module):
    """Guard ``Template._get_default_module`` so it raises in async
    environments instead of returning a synchronously-built module."""
    @internalcode
    def _get_default_module(self):
        if not self.environment.is_async:
            return original_default_module(self)
        raise RuntimeError('Template module attribute is unavailable '
                           'in async mode')
    return _get_default_module
async def make_module_async(self, vars=None, shared=False, locals=None):
    """Async counterpart of ``Template.make_module``: render the template
    and wrap the produced event stream in a ``TemplateModule``."""
    context = self.new_context(vars, shared, locals)
    stream = [item async for item in self.root_render_func(context)]
    return TemplateModule(self, context, stream)
def patch_template():
    # Install the async-aware wrappers and coroutines on jinja2.Template.
    from jinja2 import Template
    Template.generate = wrap_generate_func(Template.generate)
    Template.generate_async = update_wrapper(
        generate_async, Template.generate_async)
    Template.render_async = update_wrapper(
        render_async, Template.render_async)
    Template.render = wrap_render_func(Template.render)
    Template._get_default_module = wrap_default_module(
        Template._get_default_module)
    Template._get_default_module_async = get_default_module_async
    Template.make_module_async = update_wrapper(
        make_module_async, Template.make_module_async)
def patch_runtime():
    # Patch the runtime objects that render blocks and invoke macros.
    from jinja2.runtime import BlockReference, Macro
    BlockReference.__call__ = wrap_block_reference_call(
        BlockReference.__call__)
    Macro._invoke = wrap_macro_invoke(Macro._invoke)
def patch_filters():
    # Overlay the async-aware filter variants onto the default filter map.
    from jinja2.filters import FILTERS
    from jinja2.asyncfilters import ASYNC_FILTERS
    FILTERS.update(ASYNC_FILTERS)
def patch_all():
    # Apply every async patch; called once from jinja2/__init__.py when
    # the interpreter supports async generators.
    patch_template()
    patch_runtime()
    patch_filters()
async def auto_await(value):
    """Await ``value`` when it is awaitable, otherwise return it as-is."""
    if not inspect.isawaitable(value):
        return value
    return await value
async def auto_aiter(iterable):
    """Async-generate the items of either an async or a plain iterable."""
    if not hasattr(iterable, '__aiter__'):
        for item in iterable:
            yield item
    else:
        async for item in iterable:
            yield item
class AsyncLoopContext(LoopContextBase):
    # ``loop`` object used inside ``{% for %}`` blocks in async mode;
    # iteration state is advanced by AsyncLoopContextIterator.

    def __init__(self, async_iterator, undefined, after, length, recurse=None,
                 depth0=0):
        LoopContextBase.__init__(self, undefined, recurse, depth0)
        self._async_iterator = async_iterator
        # One-item lookahead: the value following the current item.
        self._after = after
        # Pre-computed length, or None when it could not be determined
        # without blocking (see make_async_loop_context).
        self._length = length

    @property
    def length(self):
        if self._length is None:
            raise TypeError('Loop length for some iterators cannot be '
                            'lazily calculated in async mode')
        return self._length

    def __aiter__(self):
        return AsyncLoopContextIterator(self)
class AsyncLoopContextIterator(object):
    # Async-iterator driver that mutates the shared AsyncLoopContext.
    __slots__ = ('context',)

    def __init__(self, context):
        self.context = context

    def __aiter__(self):
        return self

    async def __anext__(self):
        ctx = self.context
        ctx.index0 += 1
        # ``_after`` holds the lookahead item; the sentinel marks that
        # the underlying iterator was already exhausted.
        if ctx._after is _last_iteration:
            raise StopAsyncIteration()
        ctx._before = ctx._current
        ctx._current = ctx._after
        try:
            ctx._after = await ctx._async_iterator.__anext__()
        except StopAsyncIteration:
            ctx._after = _last_iteration
        return ctx._current, ctx
async def make_async_loop_context(iterable, undefined, recurse=None, depth0=0):
    # Length is more complicated and less efficient in async mode. The
    # reason for this is that we cannot know if length will be used
    # upfront but because length is a property we cannot lazily execute it
    # later. This means that we need to buffer it up and measure :(
    #
    # We however only do this for actual iterators, not for async
    # iterators as blocking here does not seem like the best idea in the
    # world.
    try:
        length = len(iterable)
    except (TypeError, AttributeError):
        if not hasattr(iterable, '__aiter__'):
            # Plain iterator without __len__: buffer it so len() works.
            iterable = tuple(iterable)
            length = len(iterable)
        else:
            length = None
    async_iterator = auto_aiter(iterable)
    try:
        # Prime the one-item lookahead used by the loop context.
        after = await async_iterator.__anext__()
    except StopAsyncIteration:
        after = _last_iteration
    return AsyncLoopContext(async_iterator, undefined, after, length, recurse,
                            depth0)
# -*- coding: utf-8 -*- | |
""" | |
jinja2.bccache | |
~~~~~~~~~~~~~~ | |
This module implements the bytecode cache system Jinja is optionally | |
using. This is useful if you have very complex template situations and | |
    the compilation of all those templates slows down your application too | |
much. | |
Situations where this is useful are often forking web applications that | |
are initialized on the first request. | |
:copyright: (c) 2017 by the Jinja Team. | |
:license: BSD. | |
""" | |
from os import path, listdir | |
import os | |
import sys | |
import stat | |
import errno | |
import marshal | |
import tempfile | |
import fnmatch | |
from hashlib import sha1 | |
from jinja2.utils import open_if_exists | |
from jinja2._compat import BytesIO, pickle, PY2, text_type | |
# marshal works better on 3.x, one hack less required
if not PY2:
    marshal_dump = marshal.dump
    marshal_load = marshal.load
else:
    def marshal_dump(code, f):
        # Py2 marshal.dump requires a real ``file`` object; fall back to
        # serializing to a string for arbitrary file-like objects.
        if isinstance(f, file):
            marshal.dump(code, f)
        else:
            f.write(marshal.dumps(code))

    def marshal_load(f):
        if isinstance(f, file):
            return marshal.load(f)
        return marshal.loads(f.read())


# Bumped whenever the on-disk bytecode cache format changes.
bc_version = 3

# magic version used to only change with new jinja versions. With 2.6
# we change this to also take Python version changes into account. The
# reason for this is that Python tends to segfault if fed earlier bytecode
# versions because someone thought it would be a good idea to reuse opcodes
# or make Python incompatible with earlier versions.
bc_magic = 'j2'.encode('ascii') + \
    pickle.dumps(bc_version, 2) + \
    pickle.dumps((sys.version_info[0] << 24) | sys.version_info[1])
class Bucket(object):
    """Container for the compiled bytecode of a single template.

    Buckets are created and initialized by the bytecode cache and handed
    to the loading functions.  Each bucket carries a checksum assigned by
    the cache; stale cache material is rejected automatically during
    :meth:`load_bytecode`, so individual bytecode cache subclasses never
    have to deal with cache invalidation themselves.
    """

    def __init__(self, environment, key, checksum):
        self.environment = environment
        self.key = key
        self.checksum = checksum
        self.reset()

    def reset(self):
        """Resets the bucket (unloads the bytecode)."""
        self.code = None

    def load_bytecode(self, f):
        """Loads bytecode from a binary file or file-like object."""
        # Reject entries written by a different Jinja/Python combination.
        if f.read(len(bc_magic)) != bc_magic:
            self.reset()
            return
        # Reject stale entries: the template source has changed.
        if pickle.load(f) != self.checksum:
            self.reset()
            return
        # A corrupt marshal payload simply leaves the bucket empty.
        try:
            self.code = marshal_load(f)
        except (EOFError, ValueError, TypeError):
            self.reset()

    def write_bytecode(self, f):
        """Dump the bytecode into the file or file like object passed."""
        if self.code is None:
            raise TypeError("can't write empty bucket")
        f.write(bc_magic)
        pickle.dump(self.checksum, f, 2)
        marshal_dump(self.code, f)

    def bytecode_from_string(self, string):
        """Load bytecode from a bytes string."""
        self.load_bytecode(BytesIO(string))

    def bytecode_to_string(self):
        """Return the serialized bytecode as a bytes string."""
        buf = BytesIO()
        self.write_bytecode(buf)
        return buf.getvalue()
class BytecodeCache(object):
    """Abstract baseclass for bytecode caches.  Subclasses have to
    override :meth:`load_bytecode` and :meth:`dump_bytecode`; both of
    these methods are passed a :class:`~jinja2.bccache.Bucket`.

    A very basic bytecode cache that saves the bytecode on the file
    system::

        from os import path

        class MyCache(BytecodeCache):

            def __init__(self, directory):
                self.directory = directory

            def load_bytecode(self, bucket):
                filename = path.join(self.directory, bucket.key)
                if path.exists(filename):
                    with open(filename, 'rb') as f:
                        bucket.load_bytecode(f)

            def dump_bytecode(self, bucket):
                filename = path.join(self.directory, bucket.key)
                with open(filename, 'wb') as f:
                    bucket.write_bytecode(f)

    A more advanced version of a filesystem based bytecode cache is part
    of Jinja2.
    """

    def load_bytecode(self, bucket):
        """Subclasses have to override this method to load bytecode into a
        bucket.  If they are not able to find code in the cache for the
        bucket, it must not do anything.
        """
        raise NotImplementedError()

    def dump_bytecode(self, bucket):
        """Subclasses have to override this method to write the bytecode
        from a bucket back to the cache.  If it is unable to do so it must
        not fail silently but raise an exception.
        """
        raise NotImplementedError()

    def clear(self):
        """Clears the cache.  This method is not used by Jinja2 but should
        be implemented to allow applications to clear the bytecode cache
        used by a particular environment.
        """

    def get_cache_key(self, name, filename=None):
        """Returns the unique hash key for this template name."""
        digest = sha1(name.encode('utf-8'))
        if filename is not None:
            filename = '|' + filename
            if isinstance(filename, text_type):
                filename = filename.encode('utf-8')
            digest.update(filename)
        return digest.hexdigest()

    def get_source_checksum(self, source):
        """Returns a checksum for the source."""
        return sha1(source.encode('utf-8')).hexdigest()

    def get_bucket(self, environment, name, filename, source):
        """Return a cache bucket for the given template.  All arguments are
        mandatory but filename may be `None`.
        """
        bucket = Bucket(environment,
                        self.get_cache_key(name, filename),
                        self.get_source_checksum(source))
        self.load_bytecode(bucket)
        return bucket

    def set_bucket(self, bucket):
        """Put the bucket into the cache."""
        self.dump_bytecode(bucket)
class FileSystemBytecodeCache(BytecodeCache):
    """A bytecode cache that stores bytecode on the filesystem.  It accepts
    two arguments: The directory where the cache items are stored and a
    pattern string that is used to build the filename.

    If no directory is specified a default cache directory is selected.  On
    Windows the user's temp directory is used, on UNIX systems a directory
    is created for the user in the system temp directory.

    The pattern can be used to have multiple separate caches operate on the
    same directory.  The default pattern is ``'__jinja2_%s.cache'``.  ``%s``
    is replaced with the cache key.

    >>> bcc = FileSystemBytecodeCache('/tmp/jinja_cache', '%s.cache')

    This bytecode cache supports clearing of the cache using the clear method.
    """

    def __init__(self, directory=None, pattern='__jinja2_%s.cache'):
        if directory is None:
            directory = self._get_default_cache_dir()
        self.directory = directory
        self.pattern = pattern

    def _get_default_cache_dir(self):
        """Pick (creating it if necessary) a per-user cache directory that
        cannot be tampered with by other local users.
        """
        def _unsafe_dir():
            raise RuntimeError('Cannot determine safe temp directory.  You '
                               'need to explicitly provide one.')

        tmpdir = tempfile.gettempdir()

        # On Windows the temporary directory is per-user unless explicitly
        # forced otherwise, so we can just use it.
        if os.name == 'nt':
            return tmpdir
        # Without getuid() we cannot build a user-private directory name.
        if not hasattr(os, 'getuid'):
            _unsafe_dir()

        dirname = '_jinja2-cache-%d' % os.getuid()
        actual_dir = os.path.join(tmpdir, dirname)

        # Create with mode 0700; an already existing directory is fine here
        # because it is re-validated below.
        try:
            os.mkdir(actual_dir, stat.S_IRWXU)
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise
        try:
            os.chmod(actual_dir, stat.S_IRWXU)
            actual_dir_stat = os.lstat(actual_dir)
            # Refuse directories we do not own, non-directories (symlink
            # attacks) and anything not exactly mode 0700.
            if actual_dir_stat.st_uid != os.getuid() \
                    or not stat.S_ISDIR(actual_dir_stat.st_mode) \
                    or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU:
                _unsafe_dir()
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise

        # Re-check after the chmod attempt to narrow the race window between
        # creating/fixing the directory and using it.
        actual_dir_stat = os.lstat(actual_dir)
        if actual_dir_stat.st_uid != os.getuid() \
                or not stat.S_ISDIR(actual_dir_stat.st_mode) \
                or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU:
            _unsafe_dir()

        return actual_dir

    def _get_cache_filename(self, bucket):
        """Build the full path of the cache file for ``bucket``."""
        return path.join(self.directory, self.pattern % bucket.key)

    def load_bytecode(self, bucket):
        """Load cached bytecode into ``bucket`` if a cache file exists."""
        f = open_if_exists(self._get_cache_filename(bucket), 'rb')
        if f is not None:
            try:
                bucket.load_bytecode(f)
            finally:
                f.close()

    def dump_bytecode(self, bucket):
        """Write the bucket's bytecode to its cache file."""
        f = open(self._get_cache_filename(bucket), 'wb')
        try:
            bucket.write_bytecode(f)
        finally:
            f.close()

    def clear(self):
        """Remove every cache file matching this cache's pattern."""
        # imported lazily here because google app-engine doesn't support
        # write access on the file system and the function does not exist
        # normally.
        from os import remove
        files = fnmatch.filter(listdir(self.directory), self.pattern % '*')
        for filename in files:
            try:
                # best effort: a file vanishing concurrently is not an error
                remove(path.join(self.directory, filename))
            except OSError:
                pass
class MemcachedBytecodeCache(BytecodeCache):
    """A bytecode cache that stores bytecode in a memcache-style server.
    No specific memcache library is enforced (tummy's memcache or
    cmemcache both work); any client object providing the minimal
    interface below is accepted.

    Libraries compatible with this class:

    -   `werkzeug <http://werkzeug.pocoo.org/>`_.contrib.cache
    -   `python-memcached <https://www.tummy.com/Community/software/python-memcached/>`_
    -   `cmemcache <http://gijsbert.org/cmemcache/>`_

    (Unfortunately the django cache interface is not compatible because it
    does not support storing binary data, only unicode.  You can however
    pass the underlying cache client to the bytecode cache which is
    available as `django.core.cache.cache._client`.)

    The minimal interface for the client passed to the constructor is this:

    .. class:: MinimalClientInterface

        .. method:: set(key, value[, timeout])

            Stores the bytecode in the cache.  `value` is a string and
            `timeout` the timeout of the key.  If timeout is not provided
            a default timeout or no timeout should be assumed, if it's
            provided it's an integer with the number of seconds the cache
            item should exist.

        .. method:: get(key)

            Returns the value for the cache key.  If the item does not
            exist in the cache the return value must be `None`.

    The other constructor arguments are the prefix prepended to every
    cache key and the timeout for the bytecode in the cache system.  We
    recommend a high (or no) timeout.

    This bytecode cache does not support clearing of used items in the
    cache.  The clear method is a no-operation function.

    .. versionadded:: 2.7
       Added support for ignoring memcache errors through the
       `ignore_memcache_errors` parameter.
    """

    def __init__(self, client, prefix='jinja2/bytecode/', timeout=None,
                 ignore_memcache_errors=True):
        self.client = client
        self.prefix = prefix
        self.timeout = timeout
        self.ignore_memcache_errors = ignore_memcache_errors

    def load_bytecode(self, bucket):
        """Fetch cached bytecode for ``bucket`` from the server, if any."""
        code = None
        try:
            code = self.client.get(self.prefix + bucket.key)
        except Exception:
            # Server errors are swallowed unless the user opted out.
            if not self.ignore_memcache_errors:
                raise
        if code is not None:
            bucket.bytecode_from_string(code)

    def dump_bytecode(self, bucket):
        """Push the bucket's serialized bytecode to the server."""
        key = self.prefix + bucket.key
        value = bucket.bytecode_to_string()
        try:
            if self.timeout is not None:
                self.client.set(key, value, self.timeout)
            else:
                self.client.set(key, value)
        except Exception:
            # Same best-effort policy as load_bytecode.
            if not self.ignore_memcache_errors:
                raise
# -*- coding: utf-8 -*- | |
""" | |
jinja2.compiler | |
~~~~~~~~~~~~~~~ | |
Compiles nodes into python code. | |
:copyright: (c) 2017 by the Jinja Team. | |
:license: BSD, see LICENSE for more details. | |
""" | |
from itertools import chain | |
from copy import deepcopy | |
from keyword import iskeyword as is_python_keyword | |
from functools import update_wrapper | |
from jinja2 import nodes | |
from jinja2.nodes import EvalContext | |
from jinja2.visitor import NodeVisitor | |
from jinja2.optimizer import Optimizer | |
from jinja2.exceptions import TemplateAssertionError | |
from jinja2.utils import Markup, concat, escape | |
from jinja2._compat import range_type, text_type, string_types, \ | |
iteritems, NativeStringIO, imap, izip | |
from jinja2.idtracking import Symbols, VAR_LOAD_PARAMETER, \ | |
VAR_LOAD_RESOLVE, VAR_LOAD_ALIAS, VAR_LOAD_UNDEFINED | |
# mapping of jinja comparison operator tokens to their python source form
operators = {
    'eq': '==',
    'ne': '!=',
    'gt': '>',
    'gteq': '>=',
    'lt': '<',
    'lteq': '<=',
    'in': 'in',
    'notin': 'not in'
}

# what method to iterate over items do we want to use for dict iteration
# in generated code?  on 2.x let's go with iteritems, on 3.x with items
if hasattr(dict, 'iteritems'):
    dict_item_iter = 'iteritems'
else:
    dict_item_iter = 'items'

# __future__ features enabled at the top of every generated module
code_features = ['division']

# does this python version support generator stops? (PEP 0479)
# probed with exec() so the import does not fail on older interpreters
try:
    exec('from __future__ import generator_stop')
    code_features.append('generator_stop')
except SyntaxError:
    pass

# does this python version support yield from?
try:
    exec('def f(): yield from x()')
except SyntaxError:
    supports_yield_from = False
else:
    supports_yield_from = True
def optimizeconst(f):
    """Decorator for visitor methods: attempt constant folding on ``node``
    before delegating to the wrapped visitor.

    Folding is only attempted when the code generator has an optimizer and
    the frame's evaluation context is not volatile; if the optimizer
    produces a different node, that node is dispatched through
    ``self.visit`` instead of calling ``f`` directly.
    """
    def optimized_visit(self, node, frame, **kwargs):
        # Folding is unsound while the eval context is volatile.
        if not self.optimized or frame.eval_ctx.volatile:
            return f(self, node, frame, **kwargs)
        folded = self.optimizer.visit(node, frame.eval_ctx)
        if folded != node:
            # The optimizer simplified the node -- re-dispatch on the result.
            return self.visit(folded, frame)
        return f(self, node, frame, **kwargs)
    return update_wrapper(optimized_visit, f)
def generate(node, environment, name, filename, stream=None,
             defer_init=False, optimized=True):
    """Generate the python source for a node tree.

    When ``stream`` is ``None`` the generated source is returned as a
    string; otherwise it is written to the stream and nothing is returned.
    """
    if not isinstance(node, nodes.Template):
        raise TypeError('Can\'t compile non template nodes')
    generator = environment.code_generator_class(
        environment, name, filename, stream, defer_init, optimized)
    generator.visit(node)
    if stream is None:
        return generator.stream.getvalue()
def has_safe_repr(value):
    """Does the node have a safe representation?"""
    if value is None or value is NotImplemented or value is Ellipsis:
        return True
    value_type = type(value)
    # Exact type checks: subclasses may override __repr__.
    if value_type in (bool, int, float, complex, range_type,
                      Markup) + string_types:
        return True
    if value_type in (tuple, list, set, frozenset):
        return all(has_safe_repr(item) for item in value)
    if value_type is dict:
        return all(has_safe_repr(key) and has_safe_repr(val)
                   for key, val in iteritems(value))
    return False
def find_undeclared(nodes, names):
    """Check if the names passed are accessed undeclared.  The return value
    is a set of all the undeclared names from the sequence of names found.
    """
    checker = UndeclaredNameVisitor(names)
    try:
        for node in nodes:
            checker.visit(node)
    except VisitorExit:
        # Every watched name turned out undeclared; no need to walk further.
        pass
    return checker.undeclared
class MacroRef(object):
    """Compile-time bookkeeping for a single macro or call block.

    The ``accesses_*`` flags are flipped by the code generator once it
    discovers that the macro body uses the corresponding special variable.
    """

    def __init__(self, node):
        self.node = node
        self.accesses_caller = \
            self.accesses_kwargs = \
            self.accesses_varargs = False
class Frame(object):
    """Holds compile time information for us."""

    def __init__(self, eval_ctx, parent=None, level=None):
        self.eval_ctx = eval_ctx
        # NOTE: ``parent and parent.symbols or None`` yields the parent's
        # symbol table when there is a parent, otherwise None.
        self.symbols = Symbols(parent and parent.symbols or None,
                               level=level)

        # a toplevel frame is the root + soft frames such as if conditions.
        self.toplevel = False

        # the root frame is basically just the outermost frame, so no if
        # conditions.  This information is used to optimize inheritance
        # situations.
        self.rootlevel = False

        # in some dynamic inheritance situations the compiler needs to add
        # write tests around output statements.
        self.require_output_check = parent and parent.require_output_check

        # inside some tags we are using a buffer rather than yield statements.
        # this for example affects {% filter %} or {% macro %}.  If a frame
        # is buffered this variable points to the name of the list used as
        # buffer.
        self.buffer = None

        # the name of the block we're in, otherwise None.
        self.block = parent and parent.block or None

        # the parent of this frame
        self.parent = parent

        if parent is not None:
            self.buffer = parent.buffer

    def copy(self):
        """Create a copy of the current one."""
        rv = object.__new__(self.__class__)
        rv.__dict__.update(self.__dict__)
        # the symbol table is the only attribute that must not be shared
        rv.symbols = self.symbols.copy()
        return rv

    def inner(self, isolated=False):
        """Return an inner frame.  An isolated frame has no parent link and
        only inherits the symbol nesting level.
        """
        if isolated:
            return Frame(self.eval_ctx, level=self.symbols.level + 1)
        return Frame(self.eval_ctx, self)

    def soft(self):
        """Return a soft frame.  A soft frame may not be modified as
        standalone thing as it shares the resources with the frame it
        was created of, but it's not a rootlevel frame any longer.

        This is only used to implement if-statements.
        """
        rv = self.copy()
        rv.rootlevel = False
        return rv

    __copy__ = copy
class VisitorExit(RuntimeError):
    """Exception used by the `UndeclaredNameVisitor` to signal an early
    stop once every watched name has been found undeclared.
    """
class DependencyFinderVisitor(NodeVisitor):
    """A visitor that collects the names of all filter and test calls in a
    node tree, without descending into blocks.
    """

    def __init__(self):
        self.filters, self.tests = set(), set()

    def visit_Filter(self, node):
        # Visit children first so nested filters register themselves too.
        self.generic_visit(node)
        self.filters.add(node.name)

    def visit_Test(self, node):
        self.generic_visit(node)
        self.tests.add(node.name)

    def visit_Block(self, node):
        """Stop visiting at blocks."""
class UndeclaredNameVisitor(NodeVisitor):
    """A visitor that checks if a name is accessed without being
    declared.  This is different from the frame visitor as it will
    not stop at closure frames.
    """

    def __init__(self, names):
        self.names = set(names)
        self.undeclared = set()

    def visit_Name(self, node):
        if node.ctx != 'load' or node.name not in self.names:
            # A store (or a name we are not watching) declares the name.
            self.names.discard(node.name)
            return
        self.undeclared.add(node.name)
        if self.undeclared == self.names:
            # Every watched name is undeclared -- no need to walk further.
            raise VisitorExit()

    def visit_Block(self, node):
        """Stop visiting at blocks."""
class CompilerExit(Exception):
    """Raised if the compiler encountered a situation where it just
    doesn't make sense to further process the code.  Any block that
    raises such an exception is not further processed.
    """
class CodeGenerator(NodeVisitor): | |
    def __init__(self, environment, name, filename, stream=None,
                 defer_init=False, optimized=True):
        """Set up all state needed to translate a template AST into python
        source written to ``stream`` (a fresh string buffer by default).
        """
        if stream is None:
            stream = NativeStringIO()
        self.environment = environment
        self.name = name
        self.filename = filename
        self.stream = stream
        self.created_block_context = False
        self.defer_init = defer_init
        self.optimized = optimized
        if optimized:
            self.optimizer = Optimizer(environment)

        # aliases for imports
        self.import_aliases = {}

        # a registry for all blocks.  Because blocks are moved out
        # into the global python scope they are registered here
        self.blocks = {}

        # the number of extends statements so far
        self.extends_so_far = 0

        # some templates have a rootlevel extends.  In this case we
        # can safely assume that we're a child template and do some
        # more optimizations.
        self.has_known_extends = False

        # the current line number in the generated source
        self.code_lineno = 1

        # registry of all filters and tests (global, not block local)
        self.tests = {}
        self.filters = {}

        # the debug information: (template lineno, generated lineno) pairs
        self.debug_info = []
        self._write_debug_info = None

        # the number of new lines before the next write()
        self._new_lines = 0

        # the line number of the last written statement
        self._last_line = 0

        # true if nothing was written so far.
        self._first_write = True

        # used by the `temporary_identifier` method to get new
        # unique, temporary identifier
        self._last_identifier = 0

        # the current indentation level of the generated source
        self._indentation = 0

        # Tracks toplevel assignments
        self._assign_stack = []

        # Tracks parameter definition blocks
        self._param_def_block = []

        # Tracks the current context reference name.
        self._context_reference_stack = ['context']
    # -- Various compilation helpers

    def fail(self, msg, lineno):
        """Fail with a :exc:`TemplateAssertionError`."""
        raise TemplateAssertionError(msg, lineno, self.name, self.filename)

    def temporary_identifier(self):
        """Get a new unique identifier."""
        self._last_identifier += 1
        return 't_%d' % self._last_identifier

    def buffer(self, frame):
        """Enable buffering for the frame from that point onwards."""
        frame.buffer = self.temporary_identifier()
        self.writeline('%s = []' % frame.buffer)

    def return_buffer_contents(self, frame, force_unescaped=False):
        """Return the buffer contents of the frame, wrapping them in
        ``Markup`` when the evaluation context requires autoescaping.
        """
        if not force_unescaped:
            if frame.eval_ctx.volatile:
                # autoescape only known at runtime -- emit both branches
                self.writeline('if context.eval_ctx.autoescape:')
                self.indent()
                self.writeline('return Markup(concat(%s))' % frame.buffer)
                self.outdent()
                self.writeline('else:')
                self.indent()
                self.writeline('return concat(%s)' % frame.buffer)
                self.outdent()
                return
            elif frame.eval_ctx.autoescape:
                self.writeline('return Markup(concat(%s))' % frame.buffer)
                return
        self.writeline('return concat(%s)' % frame.buffer)

    def indent(self):
        """Indent by one."""
        self._indentation += 1

    def outdent(self, step=1):
        """Outdent by step."""
        self._indentation -= step

    def start_write(self, frame, node=None):
        """Yield or write into the frame buffer."""
        if frame.buffer is None:
            self.writeline('yield ', node)
        else:
            self.writeline('%s.append(' % frame.buffer, node)

    def end_write(self, frame):
        """End the writing process started by `start_write`."""
        if frame.buffer is not None:
            self.write(')')

    def simple_write(self, s, frame, node=None):
        """Simple shortcut for start_write + write + end_write."""
        self.start_write(frame, node)
        self.write(s)
        self.end_write(frame)
    def blockvisit(self, nodes, frame):
        """Visit a list of nodes as block in a frame.  If the current frame
        is no buffer a dummy ``if 0: yield None`` is written automatically.
        """
        try:
            # the leading pass keeps the generated block non-empty
            self.writeline('pass')
            for node in nodes:
                self.visit(node, frame)
        except CompilerExit:
            pass

    def write(self, x):
        """Write a string into the output stream, flushing any pending
        newlines, indentation and debug info first.
        """
        if self._new_lines:
            if not self._first_write:
                self.stream.write('\n' * self._new_lines)
                self.code_lineno += self._new_lines
                if self._write_debug_info is not None:
                    self.debug_info.append((self._write_debug_info,
                                            self.code_lineno))
                    self._write_debug_info = None
            self._first_write = False
            self.stream.write('    ' * self._indentation)
            self._new_lines = 0
        self.stream.write(x)

    def writeline(self, x, node=None, extra=0):
        """Combination of newline and write."""
        self.newline(node, extra)
        self.write(x)

    def newline(self, node=None, extra=0):
        """Add one or more newlines before the next write."""
        self._new_lines = max(self._new_lines, 1 + extra)
        # remember the template line so write() can record debug info
        if node is not None and node.lineno != self._last_line:
            self._write_debug_info = node.lineno
            self._last_line = node.lineno
    def signature(self, node, frame, extra_kwargs=None):
        """Writes a function call to the stream for the current node.
        A leading comma is added automatically.  The extra keyword
        arguments may not include python keywords otherwise a syntax
        error could occur.  The extra keyword arguments should be given
        as python dict.
        """
        # if any of the given keyword arguments is a python keyword
        # we have to make sure that no invalid call is created.
        kwarg_workaround = False
        for kwarg in chain((x.key for x in node.kwargs), extra_kwargs or ()):
            if is_python_keyword(kwarg):
                kwarg_workaround = True
                break

        for arg in node.args:
            self.write(', ')
            self.visit(arg, frame)

        if not kwarg_workaround:
            # safe to emit name=value pairs directly
            for kwarg in node.kwargs:
                self.write(', ')
                self.visit(kwarg, frame)
            if extra_kwargs is not None:
                for key, value in iteritems(extra_kwargs):
                    self.write(', %s=%s' % (key, value))
        if node.dyn_args:
            self.write(', *')
            self.visit(node.dyn_args, frame)

        if kwarg_workaround:
            # keyword names collide with python keywords: pass everything
            # through **{...} (merged with **dyn_kwargs when present)
            if node.dyn_kwargs is not None:
                self.write(', **dict({')
            else:
                self.write(', **{')
            for kwarg in node.kwargs:
                self.write('%r: ' % kwarg.key)
                self.visit(kwarg.value, frame)
                self.write(', ')
            if extra_kwargs is not None:
                for key, value in iteritems(extra_kwargs):
                    self.write('%r: %s, ' % (key, value))
            if node.dyn_kwargs is not None:
                self.write('}, **')
                self.visit(node.dyn_kwargs, frame)
                self.write(')')
            else:
                self.write('}')
        elif node.dyn_kwargs is not None:
            self.write(', **')
            self.visit(node.dyn_kwargs, frame)
    def pull_dependencies(self, nodes):
        """Pull all filter and test dependencies used by ``nodes`` into
        local aliases looked up from the environment once.
        """
        visitor = DependencyFinderVisitor()
        for node in nodes:
            visitor.visit(node)
        for dependency in 'filters', 'tests':
            mapping = getattr(self, dependency)
            for name in getattr(visitor, dependency):
                if name not in mapping:
                    mapping[name] = self.temporary_identifier()
                self.writeline('%s = environment.%s[%r]' %
                               (mapping[name], dependency, name))
    def enter_frame(self, frame):
        """Emit the load instructions recorded in the frame's symbol
        table: resolve from context, alias another target, or mark as
        undefined (``missing``).
        """
        undefs = []
        for target, (action, param) in iteritems(frame.symbols.loads):
            if action == VAR_LOAD_PARAMETER:
                # parameters are already bound by the function signature
                pass
            elif action == VAR_LOAD_RESOLVE:
                self.writeline('%s = %s(%r)' %
                               (target, self.get_resolve_func(), param))
            elif action == VAR_LOAD_ALIAS:
                self.writeline('%s = %s' % (target, param))
            elif action == VAR_LOAD_UNDEFINED:
                undefs.append(target)
            else:
                raise NotImplementedError('unknown load instruction')
        if undefs:
            self.writeline('%s = missing' % ' = '.join(undefs))

    def leave_frame(self, frame, with_python_scope=False):
        """Reset the frame's targets to ``missing`` unless the frame got
        its own python scope (a real function body).
        """
        if not with_python_scope:
            undefs = []
            for target, _ in iteritems(frame.symbols.loads):
                undefs.append(target)
            if undefs:
                self.writeline('%s = missing' % ' = '.join(undefs))

    def func(self, name):
        """Return the ``def``/``async def`` header text for ``name``."""
        if self.environment.is_async:
            return 'async def %s' % name
        return 'def %s' % name
    def macro_body(self, node, frame):
        """Dump the function def of a macro or call block."""
        frame = frame.inner()
        frame.symbols.analyze_node(node)
        macro_ref = MacroRef(node)

        explicit_caller = None
        skip_special_params = set()
        args = []
        for idx, arg in enumerate(node.args):
            if arg.name == 'caller':
                explicit_caller = idx
            if arg.name in ('kwargs', 'varargs'):
                # explicitly listed special names lose their magic below
                skip_special_params.add(arg.name)
            args.append(frame.symbols.ref(arg.name))

        undeclared = find_undeclared(node.body, ('caller', 'kwargs', 'varargs'))

        if 'caller' in undeclared:
            # In older Jinja2 versions there was a bug that allowed caller
            # to retain the special behavior even if it was mentioned in
            # the argument list.  However thankfully this was only really
            # working if it was the last argument.  So we are explicitly
            # checking this now and error out if it is anywhere else in
            # the argument list.
            if explicit_caller is not None:
                try:
                    # negative index into defaults: succeeds only when the
                    # explicit caller argument carries a default value
                    node.defaults[explicit_caller - len(node.args)]
                except IndexError:
                    self.fail('When defining macros or call blocks the '
                              'special "caller" argument must be omitted '
                              'or be given a default.', node.lineno)
            else:
                args.append(frame.symbols.declare_parameter('caller'))
            macro_ref.accesses_caller = True
        if 'kwargs' in undeclared and not 'kwargs' in skip_special_params:
            args.append(frame.symbols.declare_parameter('kwargs'))
            macro_ref.accesses_kwargs = True
        if 'varargs' in undeclared and not 'varargs' in skip_special_params:
            args.append(frame.symbols.declare_parameter('varargs'))
            macro_ref.accesses_varargs = True

        # macros are delayed, they never require output checks
        frame.require_output_check = False
        frame.symbols.analyze_node(node)
        self.writeline('%s(%s):' % (self.func('macro'), ', '.join(args)), node)
        self.indent()

        self.buffer(frame)
        self.enter_frame(frame)

        self.push_parameter_definitions(frame)
        for idx, arg in enumerate(node.args):
            ref = frame.symbols.ref(arg.name)
            self.writeline('if %s is missing:' % ref)
            self.indent()
            try:
                # defaults align with the tail of the argument list
                default = node.defaults[idx - len(node.args)]
            except IndexError:
                self.writeline('%s = undefined(%r, name=%r)' % (
                    ref,
                    'parameter %r was not provided' % arg.name,
                    arg.name))
            else:
                self.writeline('%s = ' % ref)
                self.visit(default, frame)
            self.mark_parameter_stored(ref)
            self.outdent()
        self.pop_parameter_definitions()

        self.blockvisit(node.body, frame)
        self.return_buffer_contents(frame, force_unescaped=True)
        self.leave_frame(frame, with_python_scope=True)
        self.outdent()

        return frame, macro_ref
    def macro_def(self, macro_ref, frame):
        """Dump the macro definition for the def created by macro_body."""
        arg_tuple = ', '.join(repr(x.name) for x in macro_ref.node.args)
        name = getattr(macro_ref.node, 'name', None)
        if len(macro_ref.node.args) == 1:
            # trailing comma keeps the single-element tuple a tuple
            arg_tuple += ','
        self.write('Macro(environment, macro, %r, (%s), %r, %r, %r, '
                   'context.eval_ctx.autoescape)' %
                   (name, arg_tuple, macro_ref.accesses_kwargs,
                    macro_ref.accesses_varargs, macro_ref.accesses_caller))

    def position(self, node):
        """Return a human readable position for the node."""
        rv = 'line %d' % node.lineno
        if self.name is not None:
            rv += ' in ' + repr(self.name)
        return rv

    def dump_local_context(self, frame):
        """Return a dict-literal source string mapping variable names to
        their compiled target identifiers.
        """
        return '{%s}' % ', '.join(
            '%r: %s' % (name, target) for name, target
            in iteritems(frame.symbols.dump_stores()))
    def write_commons(self):
        """Writes a common preamble that is used by root and block functions.
        Primarily this sets up common local helpers and enforces a generator
        through a dead branch.
        """
        self.writeline('resolve = context.resolve_or_missing')
        self.writeline('undefined = environment.undefined')
        # the dead branch guarantees the emitted function is a generator
        self.writeline('if 0: yield None')

    def push_parameter_definitions(self, frame):
        """Pushes all parameter targets from the given frame into a local
        stack that permits tracking of yet to be assigned parameters.  In
        particular this enables the optimization from `visit_Name` to skip
        undefined expressions for parameters in macros as macros can reference
        otherwise unbound parameters.
        """
        self._param_def_block.append(frame.symbols.dump_param_targets())

    def pop_parameter_definitions(self):
        """Pops the current parameter definitions set."""
        self._param_def_block.pop()

    def mark_parameter_stored(self, target):
        """Marks a parameter in the current parameter definitions as stored.
        This will skip the enforced undefined checks.
        """
        if self._param_def_block:
            self._param_def_block[-1].discard(target)

    def push_context_reference(self, target):
        """Make ``target`` the identifier used to reference the context."""
        self._context_reference_stack.append(target)

    def pop_context_reference(self):
        """Restore the previous context reference identifier."""
        self._context_reference_stack.pop()

    def get_context_ref(self):
        """Return the identifier currently used to reference the context."""
        return self._context_reference_stack[-1]

    def get_resolve_func(self):
        """Return the source expression that resolves a variable name in
        the current context ('resolve' is a local alias for the toplevel
        context, see write_commons).
        """
        target = self._context_reference_stack[-1]
        if target == 'context':
            return 'resolve'
        return '%s.resolve' % target

    def derive_context(self, frame):
        """Return a source expression deriving a new context that carries
        the frame's local variables.
        """
        return '%s.derived(%s)' % (
            self.get_context_ref(),
            self.dump_local_context(frame),
        )

    def parameter_is_undeclared(self, target):
        """Checks if a given target is an undeclared parameter."""
        if not self._param_def_block:
            return False
        return target in self._param_def_block[-1]
    def push_assign_tracking(self):
        """Pushes a new layer for assignment tracking."""
        self._assign_stack.append(set())

    def pop_assign_tracking(self, frame):
        """Pops the topmost level for assignment tracking and updates the
        context variables if necessary.
        """
        vars = self._assign_stack.pop()
        # only toplevel assignments are exported to the context
        if not frame.toplevel or not vars:
            return
        # names starting with an underscore stay private to the template
        public_names = [x for x in vars if x[:1] != '_']
        if len(vars) == 1:
            name = next(iter(vars))
            ref = frame.symbols.ref(name)
            self.writeline('context.vars[%r] = %s' % (name, ref))
        else:
            self.writeline('context.vars.update({')
            for idx, name in enumerate(vars):
                if idx:
                    self.write(', ')
                ref = frame.symbols.ref(name)
                self.write('%r: %s' % (name, ref))
            self.write('})')
        if public_names:
            if len(public_names) == 1:
                self.writeline('context.exported_vars.add(%r)' %
                               public_names[0])
            else:
                self.writeline('context.exported_vars.update((%s))' %
                               ', '.join(imap(repr, public_names)))
# -- Statement Visitors | |
def visit_Template(self, node, frame=None):
    """Generate the full Python module for a template: runtime imports,
    the ``root`` render function, one ``block_<name>`` function per
    ``{% block %}``, the ``blocks`` mapping and the ``debug_info`` string.
    """
    assert frame is None, 'no root frame allowed'
    eval_ctx = EvalContext(self.environment, self.name)

    from jinja2.runtime import __all__ as exported
    self.writeline('from __future__ import %s' % ', '.join(code_features))
    self.writeline('from jinja2.runtime import ' + ', '.join(exported))

    if self.environment.is_async:
        self.writeline('from jinja2.asyncsupport import auto_await, '
                       'auto_aiter, make_async_loop_context')

    # if we want a deferred initialization we cannot move the
    # environment into a local name
    envenv = not self.defer_init and ', environment=environment' or ''

    # do we have an extends tag at all? If not, we can save some
    # overhead by just not processing any inheritance code.
    have_extends = node.find(nodes.Extends) is not None

    # find all blocks (duplicate block names are a compile error)
    for block in node.find_all(nodes.Block):
        if block.name in self.blocks:
            self.fail('block %r defined twice' % block.name, block.lineno)
        self.blocks[block.name] = block

    # find all imports and import them under temporary aliases
    for import_ in node.find_all(nodes.ImportedName):
        if import_.importname not in self.import_aliases:
            imp = import_.importname
            self.import_aliases[imp] = alias = self.temporary_identifier()
            if '.' in imp:
                module, obj = imp.rsplit('.', 1)
                self.writeline('from %s import %s as %s' %
                               (module, obj, alias))
            else:
                self.writeline('import %s as %s' % (imp, alias))

    # add the load name
    self.writeline('name = %r' % self.name)

    # generate the root render function.
    self.writeline('%s(context, missing=missing%s):' %
                   (self.func('root'), envenv), extra=1)
    self.indent()
    self.write_commons()

    # process the root
    frame = Frame(eval_ctx)
    if 'self' in find_undeclared(node.body, ('self',)):
        ref = frame.symbols.declare_parameter('self')
        self.writeline('%s = TemplateReference(context)' % ref)
    frame.symbols.analyze_node(node)
    frame.toplevel = frame.rootlevel = True
    # output checks are only needed while an extends could still happen
    frame.require_output_check = have_extends and not self.has_known_extends
    if have_extends:
        self.writeline('parent_template = None')
    self.enter_frame(frame)
    self.pull_dependencies(node.body)
    self.blockvisit(node.body, frame)
    self.leave_frame(frame, with_python_scope=True)
    self.outdent()

    # make sure that the parent root is called.
    if have_extends:
        if not self.has_known_extends:
            self.indent()
            self.writeline('if parent_template is not None:')
        self.indent()
        if supports_yield_from and not self.environment.is_async:
            self.writeline('yield from parent_template.'
                           'root_render_func(context)')
        else:
            self.writeline('%sfor event in parent_template.'
                           'root_render_func(context):' %
                           (self.environment.is_async and 'async ' or ''))
            self.indent()
            self.writeline('yield event')
            self.outdent()
        self.outdent(1 + (not self.has_known_extends))

    # at this point we now have the blocks collected and can visit them too.
    for name, block in iteritems(self.blocks):
        self.writeline('%s(context, missing=missing%s):' %
                       (self.func('block_' + name), envenv),
                       block, 1)
        self.indent()
        self.write_commons()
        # It's important that we do not make this frame a child of the
        # toplevel template. This would cause a variety of
        # interesting issues with identifier tracking.
        block_frame = Frame(eval_ctx)
        undeclared = find_undeclared(block.body, ('self', 'super'))
        if 'self' in undeclared:
            ref = block_frame.symbols.declare_parameter('self')
            self.writeline('%s = TemplateReference(context)' % ref)
        if 'super' in undeclared:
            ref = block_frame.symbols.declare_parameter('super')
            self.writeline('%s = context.super(%r, '
                           'block_%s)' % (ref, name, name))
        block_frame.symbols.analyze_node(block)
        block_frame.block = name
        self.enter_frame(block_frame)
        self.pull_dependencies(block.body)
        self.blockvisit(block.body, block_frame)
        self.leave_frame(block_frame, with_python_scope=True)
        self.outdent()

    self.writeline('blocks = {%s}' % ', '.join('%r: block_%s' % (x, x)
                                               for x in self.blocks),
                   extra=1)

    # add a function that returns the debug info
    self.writeline('debug_info = %r' % '&'.join('%s=%s' % x for x
                                                in self.debug_info))
def visit_Block(self, node, frame):
    """Call a block and register it for the template."""
    level = 0
    if frame.toplevel:
        # if we know that we are a child template, there is no need to
        # check if we are one
        if self.has_known_extends:
            return
        if self.extends_so_far > 0:
            # only render the block if no parent took over at runtime
            self.writeline('if parent_template is None:')
            self.indent()
            level += 1

    # scoped blocks get a derived context with the frame's locals
    if node.scoped:
        context = self.derive_context(frame)
    else:
        context = self.get_context_ref()

    if supports_yield_from and not self.environment.is_async and \
       frame.buffer is None:
        self.writeline('yield from context.blocks[%r][0](%s)' % (
                       node.name, context), node)
    else:
        loop = self.environment.is_async and 'async for' or 'for'
        self.writeline('%s event in context.blocks[%r][0](%s):' % (
                       loop, node.name, context), node)
        self.indent()
        self.simple_write('event', frame)
        self.outdent()

    self.outdent(level)
def visit_Extends(self, node, frame):
    """Calls the extender."""
    if not frame.toplevel:
        self.fail('cannot use extend from a non top-level scope',
                  node.lineno)

    # if the number of extends statements in general is zero so
    # far, we don't have to add a check if something extended
    # the template before this one.
    if self.extends_so_far > 0:

        # if we have a known extends we just add a template runtime
        # error into the generated code. We could catch that at compile
        # time too, but i welcome it not to confuse users by throwing the
        # same error at different times just "because we can".
        if not self.has_known_extends:
            self.writeline('if parent_template is not None:')
            self.indent()
        self.writeline('raise TemplateRuntimeError(%r)' %
                       'extended multiple times')

        # if we have a known extends already we don't need that code here
        # as we know that the template execution will end here.
        if self.has_known_extends:
            raise CompilerExit()
        else:
            self.outdent()

    self.writeline('parent_template = environment.get_template(', node)
    self.visit(node.template, frame)
    self.write(', %r)' % self.name)
    # register the parent's blocks so {{ super() }} can find them
    self.writeline('for name, parent_block in parent_template.'
                   'blocks.%s():' % dict_item_iter)
    self.indent()
    self.writeline('context.blocks.setdefault(name, []).'
                   'append(parent_block)')
    self.outdent()

    # if this extends statement was in the root level we can take
    # advantage of that information and simplify the generated code
    # in the top level from this point onwards
    if frame.rootlevel:
        self.has_known_extends = True

    # and now we have one more
    self.extends_so_far += 1
def visit_Include(self, node, frame):
    """Handles includes.

    Picks ``get_template`` / ``select_template`` /
    ``get_or_select_template`` from the shape of the template expression,
    and wraps everything in a try/except TemplateNotFound when
    ``ignore missing`` was used.
    """
    if node.ignore_missing:
        self.writeline('try:')
        self.indent()

    func_name = 'get_or_select_template'
    if isinstance(node.template, nodes.Const):
        if isinstance(node.template.value, string_types):
            func_name = 'get_template'
        elif isinstance(node.template.value, (tuple, list)):
            func_name = 'select_template'
    elif isinstance(node.template, (nodes.Tuple, nodes.List)):
        func_name = 'select_template'

    self.writeline('template = environment.%s(' % func_name, node)
    self.visit(node.template, frame)
    self.write(', %r)' % self.name)
    if node.ignore_missing:
        self.outdent()
        self.writeline('except TemplateNotFound:')
        self.indent()
        self.writeline('pass')
        self.outdent()
        self.writeline('else:')
        self.indent()

    skip_event_yield = False
    if node.with_context:
        # render with a new context seeded from the current one
        loop = self.environment.is_async and 'async for' or 'for'
        self.writeline('%s event in template.root_render_func('
                       'template.new_context(context.get_all(), True, '
                       '%s)):' % (loop, self.dump_local_context(frame)))
    elif self.environment.is_async:
        self.writeline('for event in (await '
                       'template._get_default_module_async())'
                       '._body_stream:')
    else:
        if supports_yield_from:
            self.writeline('yield from template._get_default_module()'
                           '._body_stream')
            skip_event_yield = True
        else:
            self.writeline('for event in template._get_default_module()'
                           '._body_stream:')

    if not skip_event_yield:
        self.indent()
        self.simple_write('event', frame)
        self.outdent()

    if node.ignore_missing:
        self.outdent()
def visit_Import(self, node, frame):
    """Visit regular imports (``{% import ... as x %}``).

    Assigns the imported template module to the target name and, at
    top level, mirrors it into ``context.vars`` while removing it from
    the exported names.
    """
    self.writeline('%s = ' % frame.symbols.ref(node.target), node)
    if frame.toplevel:
        self.write('context.vars[%r] = ' % node.target)
    if self.environment.is_async:
        self.write('await ')
    self.write('environment.get_template(')
    self.visit(node.template, frame)
    self.write(', %r).' % self.name)
    if node.with_context:
        # module built against the caller's full context
        self.write('make_module%s(context.get_all(), True, %s)'
                   % (self.environment.is_async and '_async' or '',
                      self.dump_local_context(frame)))
    elif self.environment.is_async:
        self.write('_get_default_module_async()')
    else:
        self.write('_get_default_module()')
    if frame.toplevel and not node.target.startswith('_'):
        self.writeline('context.exported_vars.discard(%r)' % node.target)
def visit_FromImport(self, node, frame):
    """Visit named imports (``{% from ... import a, b as c %}``).

    Each requested name is fetched from the included template module
    with a ``missing`` default and replaced by an ``undefined`` object
    when absent. At top level the names are mirrored into
    ``context.vars`` and removed from the exported set.
    """
    self.newline(node)
    self.write('included_template = %senvironment.get_template('
               % (self.environment.is_async and 'await ' or ''))
    self.visit(node.template, frame)
    self.write(', %r).' % self.name)
    if node.with_context:
        self.write('make_module%s(context.get_all(), True, %s)'
                   % (self.environment.is_async and '_async' or '',
                      self.dump_local_context(frame)))
    elif self.environment.is_async:
        self.write('_get_default_module_async()')
    else:
        self.write('_get_default_module()')

    var_names = []
    discarded_names = []
    for name in node.names:
        # a tuple entry means ``name as alias``
        if isinstance(name, tuple):
            name, alias = name
        else:
            alias = name
        self.writeline('%s = getattr(included_template, '
                       '%r, missing)' % (frame.symbols.ref(alias), name))
        self.writeline('if %s is missing:' % frame.symbols.ref(alias))
        self.indent()
        self.writeline('%s = undefined(%r %% '
                       'included_template.__name__, '
                       'name=%r)' %
                       (frame.symbols.ref(alias),
                        'the template %%r (imported on %s) does '
                        'not export the requested name %s' % (
                            self.position(node),
                            repr(name)
                        ), name))
        self.outdent()
        if frame.toplevel:
            var_names.append(alias)
            if not alias.startswith('_'):
                discarded_names.append(alias)

    if var_names:
        if len(var_names) == 1:
            name = var_names[0]
            self.writeline('context.vars[%r] = %s' %
                           (name, frame.symbols.ref(name)))
        else:
            self.writeline('context.vars.update({%s})' % ', '.join(
                '%r: %s' % (name, frame.symbols.ref(name)) for name in var_names
            ))
    if discarded_names:
        if len(discarded_names) == 1:
            self.writeline('context.exported_vars.discard(%r)' %
                           discarded_names[0])
        else:
            self.writeline('context.exported_vars.difference_'
                           'update((%s))' % ', '.join(imap(repr, discarded_names)))
def visit_For(self, node, frame):
    """Generate code for a ``{% for %}`` loop, including the optional
    inline filter (``if`` clause), ``else`` branch, the special ``loop``
    variable, and recursive loops.
    """
    loop_frame = frame.inner()
    test_frame = frame.inner()
    else_frame = frame.inner()

    # try to figure out if we have an extended loop. An extended loop
    # is necessary if the loop is in recursive mode if the special loop
    # variable is accessed in the body.
    extended_loop = node.recursive or 'loop' in \
                    find_undeclared(node.iter_child_nodes(
                        only=('body',)), ('loop',))

    loop_ref = None
    if extended_loop:
        loop_ref = loop_frame.symbols.declare_parameter('loop')

    loop_frame.symbols.analyze_node(node, for_branch='body')
    if node.else_:
        else_frame.symbols.analyze_node(node, for_branch='else')

    if node.test:
        # the inline ``if`` becomes a generator function wrapping the
        # iterable so filtering happens before the LoopContext sees it
        loop_filter_func = self.temporary_identifier()
        test_frame.symbols.analyze_node(node, for_branch='test')
        self.writeline('%s(fiter):' % self.func(loop_filter_func), node.test)
        self.indent()
        self.enter_frame(test_frame)
        self.writeline(self.environment.is_async and 'async for ' or 'for ')
        self.visit(node.target, loop_frame)
        self.write(' in ')
        self.write(self.environment.is_async and 'auto_aiter(fiter)' or 'fiter')
        self.write(':')
        self.indent()
        self.writeline('if ', node.test)
        self.visit(node.test, test_frame)
        self.write(':')
        self.indent()
        self.writeline('yield ')
        self.visit(node.target, loop_frame)
        self.outdent(3)
        self.leave_frame(test_frame, with_python_scope=True)

    # if we don't have an recursive loop we have to find the shadowed
    # variables at that point. Because loops can be nested but the loop
    # variable is a special one we have to enforce aliasing for it.
    if node.recursive:
        self.writeline('%s(reciter, loop_render_func, depth=0):' %
                       self.func('loop'), node)
        self.indent()
        self.buffer(loop_frame)

        # Use the same buffer for the else frame
        else_frame.buffer = loop_frame.buffer

    # make sure the loop variable is a special one and raise a template
    # assertion error if a loop tries to write to loop
    if extended_loop:
        self.writeline('%s = missing' % loop_ref)

    for name in node.find_all(nodes.Name):
        if name.ctx == 'store' and name.name == 'loop':
            self.fail('Can\'t assign to special loop variable '
                      'in for-loop target', name.lineno)

    if node.else_:
        # flag that flips to 0 as soon as the loop body ran once
        iteration_indicator = self.temporary_identifier()
        self.writeline('%s = 1' % iteration_indicator)

    self.writeline(self.environment.is_async and 'async for ' or 'for ', node)
    self.visit(node.target, loop_frame)
    if extended_loop:
        if self.environment.is_async:
            self.write(', %s in await make_async_loop_context(' % loop_ref)
        else:
            self.write(', %s in LoopContext(' % loop_ref)
    else:
        self.write(' in ')

    if node.test:
        self.write('%s(' % loop_filter_func)
    if node.recursive:
        self.write('reciter')
    else:
        if self.environment.is_async and not extended_loop:
            self.write('auto_aiter(')
        self.visit(node.iter, frame)
        if self.environment.is_async and not extended_loop:
            self.write(')')
    if node.test:
        self.write(')')

    if node.recursive:
        self.write(', undefined, loop_render_func, depth):')
    else:
        self.write(extended_loop and ', undefined):' or ':')

    self.indent()
    self.enter_frame(loop_frame)

    self.blockvisit(node.body, loop_frame)
    if node.else_:
        self.writeline('%s = 0' % iteration_indicator)
    self.outdent()
    self.leave_frame(loop_frame, with_python_scope=node.recursive
                     and not node.else_)

    if node.else_:
        self.writeline('if %s:' % iteration_indicator)
        self.indent()
        self.enter_frame(else_frame)
        self.blockvisit(node.else_, else_frame)
        self.leave_frame(else_frame)
        self.outdent()

    # if the node was recursive we have to return the buffer contents
    # and start the iteration code
    if node.recursive:
        self.return_buffer_contents(loop_frame)
        self.outdent()
        self.start_write(frame, node)
        if self.environment.is_async:
            self.write('await ')
        self.write('loop(')
        if self.environment.is_async:
            self.write('auto_aiter(')
        self.visit(node.iter, frame)
        if self.environment.is_async:
            self.write(')')
        self.write(', loop)')
        self.end_write(frame)
def visit_If(self, node, frame):
    """Emit an if/elif/else chain; all branches share one soft frame."""
    if_frame = frame.soft()
    # The first branch uses the ``if`` keyword, every following one
    # (taken from node.elif_) uses ``elif``.
    branches = [(node, 'if ', node.test, node.body)]
    branches.extend((elif_, 'elif ', elif_.test, elif_.body)
                    for elif_ in node.elif_)
    for source_node, keyword, test, body in branches:
        self.writeline(keyword, source_node)
        self.visit(test, if_frame)
        self.write(':')
        self.indent()
        self.blockvisit(body, if_frame)
        self.outdent()
    if node.else_:
        self.writeline('else:')
        self.indent()
        self.blockvisit(node.else_, if_frame)
        self.outdent()
def visit_Macro(self, node, frame):
    """Define a macro: bind the macro object to its name and, at top
    level, mirror it into ``context.vars`` / ``context.exported_vars``.
    """
    macro_frame, macro_ref = self.macro_body(node, frame)
    self.newline()
    if frame.toplevel:
        if not node.name.startswith('_'):
            self.write('context.exported_vars.add(%r)' % node.name)
        # (removed an unused ``ref = frame.symbols.ref(node.name)`` —
        # the same lookup happens unconditionally below)
        self.writeline('context.vars[%r] = ' % node.name)
    self.write('%s = ' % frame.symbols.ref(node.name))
    self.macro_def(macro_ref, macro_frame)
def visit_CallBlock(self, node, frame):
    """Compile a ``{% call %}`` block: the body becomes a ``caller``
    macro that is forwarded to the call expression.
    """
    call_frame, macro_ref = self.macro_body(node, frame)
    self.writeline('caller = ')
    self.macro_def(macro_ref, call_frame)
    self.start_write(frame, node)
    self.visit_Call(node.call, frame, forward_caller=True)
    self.end_write(frame)
def visit_FilterBlock(self, node, frame):
    """Compile a ``{% filter %}`` block: buffer the body, then run the
    filter over the buffered output.
    """
    filter_frame = frame.inner()
    filter_frame.symbols.analyze_node(node)
    self.enter_frame(filter_frame)
    self.buffer(filter_frame)
    self.blockvisit(node.body, filter_frame)
    self.start_write(frame, node)
    # node.filter has no inner node, so visit_Filter reads the buffer
    self.visit_Filter(node.filter, filter_frame)
    self.end_write(frame)
    self.leave_frame(filter_frame)
def visit_With(self, node, frame):
    """Compile a ``{% with %}`` block: assign each target from its value
    expression inside an inner frame, then render the body.

    Note the value expressions are visited against the *outer* frame so
    they cannot see the names being introduced.
    """
    with_frame = frame.inner()
    with_frame.symbols.analyze_node(node)
    self.enter_frame(with_frame)
    # the index from enumerate() was never used — iterate pairs directly
    for target, expr in izip(node.targets, node.values):
        self.newline()
        self.visit(target, with_frame)
        self.write(' = ')
        self.visit(expr, frame)
    self.blockvisit(node.body, with_frame)
    self.leave_frame(with_frame)
def visit_ExprStmt(self, node, frame):
    """Emit a ``{% do %}`` statement: the wrapped expression on its own
    line, evaluated only for its side effects.
    """
    self.newline(node)
    self.visit(node.node, frame)
def visit_Output(self, node, frame):
    """Generate output for template data and ``{{ ... }}`` expressions.

    Constant chunks are folded at compile time where possible; the rest
    is emitted either as individual yields/appends or, for three or more
    chunks without a buffer, as a single format-string yield.
    """
    # if we have a known extends statement, we don't output anything
    # if we are in a require_output_check section
    if self.has_known_extends and frame.require_output_check:
        return

    allow_constant_finalize = True
    if self.environment.finalize:
        func = self.environment.finalize
        if getattr(func, 'contextfunction', False) or \
           getattr(func, 'evalcontextfunction', False):
            # context-dependent finalize cannot run at compile time
            allow_constant_finalize = False
        elif getattr(func, 'environmentfunction', False):
            finalize = lambda x: text_type(
                self.environment.finalize(self.environment, x))
        else:
            finalize = lambda x: text_type(self.environment.finalize(x))
    else:
        finalize = text_type

    # if we are inside a frame that requires output checking, we do so
    outdent_later = False
    if frame.require_output_check:
        self.writeline('if parent_template is None:')
        self.indent()
        outdent_later = True

    # try to evaluate as many chunks as possible into a static
    # string at compile time.
    body = []
    for child in node.nodes:
        try:
            if not allow_constant_finalize:
                raise nodes.Impossible()
            const = child.as_const(frame.eval_ctx)
        except nodes.Impossible:
            body.append(child)
            continue
        # the frame can't be volatile here, because otherwise the
        # as_const() function would raise an Impossible exception
        # at that point.
        try:
            if frame.eval_ctx.autoescape:
                if hasattr(const, '__html__'):
                    const = const.__html__()
                else:
                    const = escape(const)
            const = finalize(const)
        except Exception:
            # if something goes wrong here we evaluate the node
            # at runtime for easier debugging
            body.append(child)
            continue
        # adjacent constants are merged into one list entry
        if body and isinstance(body[-1], list):
            body[-1].append(const)
        else:
            body.append([const])

    # if we have less than 3 nodes or a buffer we yield or extend/append
    if len(body) < 3 or frame.buffer is not None:
        if frame.buffer is not None:
            # for one item we append, for more we extend
            if len(body) == 1:
                self.writeline('%s.append(' % frame.buffer)
            else:
                self.writeline('%s.extend((' % frame.buffer)
            self.indent()
        for item in body:
            if isinstance(item, list):
                # pre-folded constant chunk
                val = repr(concat(item))
                if frame.buffer is None:
                    self.writeline('yield ' + val)
                else:
                    self.writeline(val + ',')
            else:
                if frame.buffer is None:
                    self.writeline('yield ', item)
                else:
                    self.newline(item)
                close = 1
                if frame.eval_ctx.volatile:
                    self.write('(escape if context.eval_ctx.autoescape'
                               ' else to_string)(')
                elif frame.eval_ctx.autoescape:
                    self.write('escape(')
                else:
                    self.write('to_string(')
                if self.environment.finalize is not None:
                    self.write('environment.finalize(')
                    if getattr(self.environment.finalize,
                               "contextfunction", False):
                        self.write('context, ')
                    close += 1
                self.visit(item, frame)
                self.write(')' * close)
                if frame.buffer is not None:
                    self.write(',')
        if frame.buffer is not None:
            # close the open parentheses
            self.outdent()
            self.writeline(len(body) == 1 and ')' or '))')

    # otherwise we create a format string as this is faster in that case
    else:
        format = []
        arguments = []
        for item in body:
            if isinstance(item, list):
                # literal text: escape % for the format string
                format.append(concat(item).replace('%', '%%'))
            else:
                format.append('%s')
                arguments.append(item)
        self.writeline('yield ')
        self.write(repr(concat(format)) + ' % (')
        self.indent()
        for argument in arguments:
            self.newline(argument)
            close = 0
            if frame.eval_ctx.volatile:
                self.write('(escape if context.eval_ctx.autoescape else'
                           ' to_string)(')
                close += 1
            elif frame.eval_ctx.autoescape:
                self.write('escape(')
                close += 1
            if self.environment.finalize is not None:
                self.write('environment.finalize(')
                if getattr(self.environment.finalize,
                           'contextfunction', False):
                    self.write('context, ')
                elif getattr(self.environment.finalize,
                             'evalcontextfunction', False):
                    self.write('context.eval_ctx, ')
                elif getattr(self.environment.finalize,
                             'environmentfunction', False):
                    self.write('environment, ')
                close += 1
            self.visit(argument, frame)
            self.write(')' * close + ', ')
        self.outdent()
        self.writeline(')')

    if outdent_later:
        self.outdent()
def visit_Assign(self, node, frame):
    """Compile ``{% set x = expr %}``; assigned names are recorded so
    pop_assign_tracking can export them at top level.
    """
    self.push_assign_tracking()
    self.newline(node)
    self.visit(node.target, frame)
    self.write(' = ')
    self.visit(node.node, frame)
    self.pop_assign_tracking(frame)
def visit_AssignBlock(self, node, frame):
    """Compile a block ``{% set x %}...{% endset %}``: capture the body
    into a buffer and assign the (optionally filtered) result.
    """
    self.push_assign_tracking()
    block_frame = frame.inner()
    # This is a special case. Since a set block always captures we
    # will disable output checks. This way one can use set blocks
    # toplevel even in extended templates.
    block_frame.require_output_check = False
    block_frame.symbols.analyze_node(node)
    self.enter_frame(block_frame)
    self.buffer(block_frame)
    self.blockvisit(node.body, block_frame)
    self.newline(node)
    self.visit(node.target, frame)
    self.write(' = (Markup if context.eval_ctx.autoescape '
               'else identity)(')
    if node.filter is not None:
        self.visit_Filter(node.filter, block_frame)
    else:
        self.write('concat(%s)' % block_frame.buffer)
    self.write(')')
    self.pop_assign_tracking(frame)
    self.leave_frame(block_frame)
# -- Expression Visitors | |
def visit_Name(self, node, frame):
    """Write the reference for a template variable, guarding loads of
    possibly-missing names with an ``undefined`` fallback expression.
    """
    # Record toplevel stores so assignment tracking can export them.
    if node.ctx == 'store' and frame.toplevel and self._assign_stack:
        self._assign_stack[-1].add(node.name)
    ref = frame.symbols.ref(node.name)

    if node.ctx == 'load':
        # A parameter load is always defined unless the parameter itself
        # is still undeclared; every other load needs a missing-check.
        load = frame.symbols.find_load(ref)
        is_declared_parameter = (load is not None and
                                 load[0] == VAR_LOAD_PARAMETER and
                                 not self.parameter_is_undeclared(ref))
        if not is_declared_parameter:
            self.write('(undefined(name=%r) if %s is missing else %s)' %
                       (node.name, ref, ref))
            return

    self.write(ref)
def visit_NSRef(self, node, frame):
    """Write a namespace attribute store target (``ns.attr``) with a
    runtime type check.
    """
    # NSRefs can only be used to store values; since they use the normal
    # `foo.bar` notation they will be parsed as a normal attribute access
    # when used anywhere but in a `set` context
    ref = frame.symbols.ref(node.name)
    self.writeline('if not isinstance(%s, Namespace):' % ref)
    self.indent()
    self.writeline('raise TemplateRuntimeError(%r)' %
                   'cannot assign attribute on non-namespace object')
    self.outdent()
    self.writeline('%s[%r]' % (ref, node.attr))
def visit_Const(self, node, frame):
    """Write a literal constant; floats use str() so the generated
    source keeps the plain decimal form, everything else uses repr().
    """
    val = node.as_const(frame.eval_ctx)
    rendered = str(val) if isinstance(val, float) else repr(val)
    self.write(rendered)
def visit_TemplateData(self, node, frame):
    """Write literal template text; when constant folding is impossible
    the decision between Markup and plain text is deferred to runtime.
    """
    try:
        self.write(repr(node.as_const(frame.eval_ctx)))
    except nodes.Impossible:
        self.write('(Markup if context.eval_ctx.autoescape else identity)(%r)'
                   % node.data)
def visit_Tuple(self, node, frame):
    """Write a tuple literal; a single element keeps the trailing comma
    (``(x,)``) and an empty tuple renders as ``()``.
    """
    self.write('(')
    for position, item in enumerate(node.items):
        if position > 0:
            self.write(', ')
        self.visit(item, frame)
    self.write(',)' if len(node.items) == 1 else ')')
def visit_List(self, node, frame):
    """Write a list literal with comma-separated visited items."""
    self.write('[')
    first = True
    for item in node.items:
        if not first:
            self.write(', ')
        first = False
        self.visit(item, frame)
    self.write(']')
def visit_Dict(self, node, frame):
    """Write a dict literal; each item contributes ``key: value``."""
    self.write('{')
    for position, item in enumerate(node.items):
        if position > 0:
            self.write(', ')
        self.visit(item.key, frame)
        self.write(': ')
        self.visit(item.value, frame)
    self.write('}')
def binop(operator, interceptable=True):
    """Build a visitor for a binary operator.  In a sandboxed
    environment intercepted operators are routed through
    ``environment.call_binop`` instead of being emitted inline.
    (``interceptable`` is currently unused here — the runtime check
    consults ``environment.intercepted_binops`` instead.)
    """
    @optimizeconst
    def visitor(self, node, frame):
        if self.environment.sandboxed and \
           operator in self.environment.intercepted_binops:
            self.write('environment.call_binop(context, %r, ' % operator)
            self.visit(node.left, frame)
            self.write(', ')
            self.visit(node.right, frame)
        else:
            self.write('(')
            self.visit(node.left, frame)
            self.write(' %s ' % operator)
            self.visit(node.right, frame)
        # closes either the call_binop(...) or the plain (...) group
        self.write(')')
    return visitor

def uaop(operator, interceptable=True):
    """Build a visitor for a unary operator; sandboxed environments may
    intercept via ``environment.call_unop``.
    """
    @optimizeconst
    def visitor(self, node, frame):
        if self.environment.sandboxed and \
           operator in self.environment.intercepted_unops:
            self.write('environment.call_unop(context, %r, ' % operator)
            self.visit(node.node, frame)
        else:
            self.write('(' + operator)
            self.visit(node.node, frame)
        self.write(')')
    return visitor

visit_Add = binop('+')
visit_Sub = binop('-')
visit_Mul = binop('*')
visit_Div = binop('/')
visit_FloorDiv = binop('//')
visit_Pow = binop('**')
visit_Mod = binop('%')
visit_And = binop('and', interceptable=False)
visit_Or = binop('or', interceptable=False)
visit_Pos = uaop('+')
visit_Neg = uaop('-')
visit_Not = uaop('not ', interceptable=False)
# the factories are only needed while building the class body
del binop, uaop
@optimizeconst
def visit_Concat(self, node, frame):
    """Write a ``~`` concatenation: join all parts with markup_join or
    unicode_join depending on autoescaping (decided at runtime when the
    eval context is volatile).
    """
    if frame.eval_ctx.volatile:
        func_name = '(context.eval_ctx.volatile and' \
                    ' markup_join or unicode_join)'
    elif frame.eval_ctx.autoescape:
        func_name = 'markup_join'
    else:
        func_name = 'unicode_join'
    self.write('%s((' % func_name)
    for arg in node.nodes:
        self.visit(arg, frame)
        self.write(', ')
    self.write('))')
@optimizeconst
def visit_Compare(self, node, frame):
    """Write a (possibly chained) comparison expression."""
    self.visit(node.expr, frame)
    for op in node.ops:
        self.visit(op, frame)

def visit_Operand(self, node, frame):
    """Write one comparison operator (mapped through ``operators``) and
    its right-hand operand.
    """
    self.write(' %s ' % operators[node.op])
    self.visit(node.expr, frame)
@optimizeconst
def visit_Getattr(self, node, frame):
    """Write attribute access routed through ``environment.getattr`` so
    the environment controls lookup and undefined handling.
    """
    self.write('environment.getattr(')
    self.visit(node.node, frame)
    self.write(', %r)' % node.attr)

@optimizeconst
def visit_Getitem(self, node, frame):
    """Write subscript access via ``environment.getitem``."""
    # slices bypass the environment getitem method.
    if isinstance(node.arg, nodes.Slice):
        self.visit(node.node, frame)
        self.write('[')
        self.visit(node.arg, frame)
        self.write(']')
    else:
        self.write('environment.getitem(')
        self.visit(node.node, frame)
        self.write(', ')
        self.visit(node.arg, frame)
        self.write(')')
def visit_Slice(self, node, frame):
    """Write slice syntax ``start:stop[:step]``: absent parts are simply
    omitted around the mandatory first colon, and the second colon only
    appears when a step is present.
    """
    start, stop, step = node.start, node.stop, node.step
    if start is not None:
        self.visit(start, frame)
    self.write(':')
    if stop is not None:
        self.visit(stop, frame)
    if step is not None:
        self.write(':')
        self.visit(step, frame)
@optimizeconst
def visit_Filter(self, node, frame):
    """Write a filter application.  Depending on the filter function's
    markers the context, eval context or environment is passed first.
    """
    if self.environment.is_async:
        self.write('await auto_await(')
    self.write(self.filters[node.name] + '(')
    func = self.environment.filters.get(node.name)
    if func is None:
        self.fail('no filter named %r' % node.name, node.lineno)
    if getattr(func, 'contextfilter', False):
        self.write('context, ')
    elif getattr(func, 'evalcontextfilter', False):
        self.write('context.eval_ctx, ')
    elif getattr(func, 'environmentfilter', False):
        self.write('environment, ')

    # if the filter node is None we are inside a filter block
    # and want to write to the current buffer
    if node.node is not None:
        self.visit(node.node, frame)
    elif frame.eval_ctx.volatile:
        self.write('(context.eval_ctx.autoescape and'
                   ' Markup(concat(%s)) or concat(%s))' %
                   (frame.buffer, frame.buffer))
    elif frame.eval_ctx.autoescape:
        self.write('Markup(concat(%s))' % frame.buffer)
    else:
        self.write('concat(%s)' % frame.buffer)
    self.signature(node, frame)
    self.write(')')
    if self.environment.is_async:
        self.write(')')
@optimizeconst
def visit_Test(self, node, frame):
    """Write a test application (``x is divisibleby 3``) as a call of
    the registered test function.
    """
    self.write(self.tests[node.name] + '(')
    if node.name not in self.environment.tests:
        self.fail('no test named %r' % node.name, node.lineno)
    self.visit(node.node, frame)
    self.signature(node, frame)
    self.write(')')
@optimizeconst
def visit_CondExpr(self, node, frame):
    """Write an inline ``a if cond else b`` expression; a missing else
    part becomes an ``undefined`` object with a helpful message.
    """
    def write_expr2():
        if node.expr2 is not None:
            return self.visit(node.expr2, frame)
        self.write('undefined(%r)' % ('the inline if-'
                   'expression on %s evaluated to false and '
                   'no else section was defined.' % self.position(node)))

    self.write('(')
    self.visit(node.expr1, frame)
    self.write(' if ')
    self.visit(node.test, frame)
    self.write(' else ')
    write_expr2()
    self.write(')')
@optimizeconst
def visit_Call(self, node, frame, forward_caller=False):
    """Write a call expression routed through ``context.call`` (or the
    sandboxed ``environment.call``); ``forward_caller`` injects the
    ``caller`` keyword argument for ``{% call %}`` blocks.
    """
    if self.environment.is_async:
        self.write('await auto_await(')
    if self.environment.sandboxed:
        self.write('environment.call(context, ')
    else:
        self.write('context.call(')
    self.visit(node.node, frame)
    extra_kwargs = forward_caller and {'caller': 'caller'} or None
    self.signature(node, frame, extra_kwargs)
    self.write(')')
    if self.environment.is_async:
        self.write(')')
def visit_Keyword(self, node, frame):
    """Emit a ``name=value`` keyword argument inside a call signature."""
    self.write('%s=' % node.key)
    self.visit(node.value, frame)
# -- Unused nodes for extensions

def visit_MarkSafe(self, node, frame):
    """Wrap the expression in Markup() unconditionally."""
    self.write('Markup(')
    self.visit(node.expr, frame)
    self.write(')')

def visit_MarkSafeIfAutoescape(self, node, frame):
    """Wrap in Markup() only when autoescaping is active at runtime."""
    self.write('(context.eval_ctx.autoescape and Markup or identity)(')
    self.visit(node.expr, frame)
    self.write(')')

def visit_EnvironmentAttribute(self, node, frame):
    """Write an attribute lookup on the environment."""
    self.write('environment.' + node.name)

def visit_ExtensionAttribute(self, node, frame):
    """Write an attribute lookup on a loaded extension."""
    self.write('environment.extensions[%r].%s' % (node.identifier, node.name))

def visit_ImportedName(self, node, frame):
    """Write the temporary alias assigned to an imported name."""
    self.write(self.import_aliases[node.importname])

def visit_InternalName(self, node, frame):
    """Write a compiler-internal name verbatim."""
    self.write(node.name)

def visit_ContextReference(self, node, frame):
    """Write a reference to the active template context."""
    self.write('context')

def visit_Continue(self, node, frame):
    """Emit a loop ``continue`` statement."""
    self.writeline('continue', node)

def visit_Break(self, node, frame):
    """Emit a loop ``break`` statement."""
    self.writeline('break', node)
def visit_Scope(self, node, frame):
    """Render the body inside a fresh inner frame so assignments do not
    leak into the enclosing scope.
    """
    scope_frame = frame.inner()
    scope_frame.symbols.analyze_node(node)
    self.enter_frame(scope_frame)
    self.blockvisit(node.body, scope_frame)
    self.leave_frame(scope_frame)
# Compile a scope whose variables are overlaid from an expression: a
# derived context is created, its ``vars`` replaced with the evaluated
# overlay expression, and the body compiled against that context.
def visit_OverlayScope(self, node, frame):
ctx = self.temporary_identifier()
self.writeline('%s = %s' % (ctx, self.derive_context(frame)))
self.writeline('%s.vars = ' % ctx)
self.visit(node.context, frame)
self.push_context_reference(ctx)
# NOTE(review): isolated=True presumably hides the enclosing frame's
# names from the inner frame -- confirm against Frame.inner.
scope_frame = frame.inner(isolated=True)
scope_frame.symbols.analyze_node(node)
self.enter_frame(scope_frame)
self.blockvisit(node.body, scope_frame)
self.leave_frame(scope_frame)
self.pop_context_reference()
# Compile assignments to the eval context.  Each option is emitted as a
# runtime assignment; when the value is constant-foldable it is also
# mirrored onto the compile-time eval context, otherwise the context is
# marked volatile so later compilation stops relying on it.
def visit_EvalContextModifier(self, node, frame):
for keyword in node.options:
self.writeline('context.eval_ctx.%s = ' % keyword.key)
self.visit(keyword.value, frame)
try:
val = keyword.value.as_const(frame.eval_ctx)
except nodes.Impossible:
frame.eval_ctx.volatile = True
else:
setattr(frame.eval_ctx, keyword.key, val)
# Like visit_EvalContextModifier, but scoped: the eval context is saved
# before the modification and restored -- both at runtime (emitted
# save/revert calls) and at compile time (frame.eval_ctx) -- once the
# body has been compiled.
def visit_ScopedEvalContextModifier(self, node, frame):
old_ctx_name = self.temporary_identifier()
saved_ctx = frame.eval_ctx.save()
self.writeline('%s = context.eval_ctx.save()' % old_ctx_name)
self.visit_EvalContextModifier(node, frame)
for child in node.body:
self.visit(child, frame)
frame.eval_ctx.revert(saved_ctx)
self.writeline('context.eval_ctx.revert(%s)' % old_ctx_name)
# -*- coding: utf-8 -*- | |
""" | |
jinja.constants | |
~~~~~~~~~~~~~~~ | |
Various constants. | |
:copyright: (c) 2017 by the Jinja Team. | |
:license: BSD, see LICENSE for more details. | |
""" | |
#: list of lorem ipsum words used by the lipsum() helper function
#: (a single whitespace-separated unicode string; consumers split it)
LOREM_IPSUM_WORDS = u'''\
a ac accumsan ad adipiscing aenean aliquam aliquet amet ante aptent arcu at
auctor augue bibendum blandit class commodo condimentum congue consectetuer
consequat conubia convallis cras cubilia cum curabitur curae cursus dapibus
diam dictum dictumst dignissim dis dolor donec dui duis egestas eget eleifend
elementum elit enim erat eros est et etiam eu euismod facilisi facilisis fames
faucibus felis fermentum feugiat fringilla fusce gravida habitant habitasse hac
hendrerit hymenaeos iaculis id imperdiet in inceptos integer interdum ipsum
justo lacinia lacus laoreet lectus leo libero ligula litora lobortis lorem
luctus maecenas magna magnis malesuada massa mattis mauris metus mi molestie
mollis montes morbi mus nam nascetur natoque nec neque netus nibh nisi nisl non
nonummy nostra nulla nullam nunc odio orci ornare parturient pede pellentesque
penatibus per pharetra phasellus placerat platea porta porttitor posuere
potenti praesent pretium primis proin pulvinar purus quam quis quisque rhoncus
ridiculus risus rutrum sagittis sapien scelerisque sed sem semper senectus sit
sociis sociosqu sodales sollicitudin suscipit suspendisse taciti tellus tempor
tempus tincidunt torquent tortor tristique turpis ullamcorper ultrices
ultricies urna ut varius vehicula vel velit venenatis vestibulum vitae vivamus
viverra volutpat vulputate'''
# -*- coding: utf-8 -*- | |
""" | |
jinja2.debug | |
~~~~~~~~~~~~ | |
Implements the debug interface for Jinja. This module does some pretty | |
ugly stuff with the Python traceback system in order to achieve tracebacks | |
with correct line numbers, locals and contents. | |
:copyright: (c) 2017 by the Jinja Team. | |
:license: BSD, see LICENSE for more details. | |
""" | |
import sys | |
import traceback | |
from types import TracebackType, CodeType | |
from jinja2.utils import missing, internal_code | |
from jinja2.exceptions import TemplateSyntaxError | |
from jinja2._compat import iteritems, reraise, PY2 | |
# on pypy we can take advantage of transparent proxies
try:
from __pypy__ import tproxy
except ImportError:
tproxy = None
# Figure out which raise syntax the running interpreter accepts: the
# two-argument Python 2 raise statement is a SyntaxError on Python 3,
# so whichever except clause triggers tells us which raise helper
# source to compile later in fake_exc_info().
try:
exec("raise TypeError, 'foo'")
except SyntaxError:
raise_helper = 'raise __jinja_exception__[1]'
except TypeError:
raise_helper = 'raise __jinja_exception__[0], __jinja_exception__[1]'
class TracebackFrameProxy(object):
    """Proxies a traceback frame."""

    def __init__(self, tb):
        # the wrapped traceback object
        self.tb = tb
        self._tb_next = None

    @property
    def tb_next(self):
        """The next proxied frame in the chain, set via :meth:`set_next`."""
        return self._tb_next

    def set_next(self, next):
        """Record ``next`` as the following frame, additionally patching
        the real traceback when a ``tb_set_next`` implementation exists.
        """
        if tb_set_next is not None:
            try:
                tb_set_next(self.tb, next.tb if next else None)
            except Exception:
                # patching real tracebacks is best effort and may fail on
                # some interpreters; keep only our own bookkeeping then
                pass
        self._tb_next = next

    @property
    def is_jinja_frame(self):
        """Whether the proxied frame belongs to compiled template code."""
        return '__jinja_template__' in self.tb.tb_frame.f_globals

    def __getattr__(self, name):
        # delegate everything else to the real traceback object
        return getattr(self.tb, name)
# Wrap a traceback frame in a TracebackFrameProxy.  Without pypy's
# tproxy the plain proxy is returned; with tproxy a transparent proxy
# of type TracebackType is built so it behaves like a real traceback.
def make_frame_proxy(frame):
proxy = TracebackFrameProxy(frame)
if tproxy is None:
return proxy
# forward every operation performed on the transparent proxy to the
# plain proxy object above
def operation_handler(operation, *args, **kwargs):
if operation in ('__getattribute__', '__getattr__'):
return getattr(proxy, args[0])
elif operation == '__setattr__':
proxy.__setattr__(*args, **kwargs)
else:
return getattr(proxy, operation)(*args, **kwargs)
return tproxy(TracebackType, operation_handler)
class ProcessedTraceback(object):
    """Holds a Jinja preprocessed traceback for printing or reraising."""

    def __init__(self, exc_type, exc_value, frames):
        assert frames, 'no frames for this traceback?'
        self.exc_type = exc_type
        self.exc_value = exc_value
        self.frames = frames

        # newly concatenate the frames (which are proxies): link each
        # frame to its successor and terminate the chain with None
        prev_tb = None
        for tb in self.frames:
            if prev_tb is not None:
                prev_tb.set_next(tb)
            prev_tb = tb
        prev_tb.set_next(None)

    def render_as_text(self, limit=None):
        """Return a string with the traceback."""
        lines = traceback.format_exception(self.exc_type, self.exc_value,
                                           self.frames[0], limit=limit)
        return ''.join(lines).rstrip()

    def render_as_html(self, full=False):
        """Return a unicode string with the traceback as rendered HTML."""
        from jinja2.debugrenderer import render_traceback
        text = self.render_as_text()
        # bug fix: on Python 3 render_as_text() already returns text and
        # calling .decode() on it raised AttributeError; only decode when
        # we actually got bytes (Python 2).
        if isinstance(text, bytes):
            text = text.decode('utf-8', 'replace')
        return u'%s\n\n<!--\n%s\n-->' % (
            render_traceback(self, full=full),
            text
        )

    @property
    def is_template_syntax_error(self):
        """`True` if this is a template syntax error."""
        return isinstance(self.exc_value, TemplateSyntaxError)

    @property
    def exc_info(self):
        """Exception info tuple with a proxy around the frame objects."""
        return self.exc_type, self.exc_value, self.frames[0]

    @property
    def standard_exc_info(self):
        """Standard python exc_info for re-raising."""
        tb = self.frames[0]
        # the frame will be an actual traceback (or transparent proxy) if
        # we are on pypy or a python implementation with support for tproxy
        if type(tb) is not TracebackType:
            tb = tb.tb
        return self.exc_type, self.exc_value, tb
def make_traceback(exc_info, source_hint=None):
    """Create a :class:`ProcessedTraceback` from an ``exc_info`` triple."""
    _, exc_value, _ = exc_info
    skip = 1
    if isinstance(exc_value, TemplateSyntaxError):
        # syntax errors get a fully faked exc_info pointing at the
        # template source and keep every frame
        exc_info = translate_syntax_error(exc_value, source_hint)
        skip = 0
    return translate_exception(exc_info, skip)
def translate_syntax_error(error, source=None):
    """Attach the template source to *error* and fake a traceback that
    points traceback systems at the offending template line.
    """
    error.source = source
    # mark the error so it is not translated a second time
    error.translated = True
    filename = '<unknown>' if error.filename is None else error.filename
    return fake_exc_info((error.__class__, error, None), filename,
                         error.lineno)
# Walk the traceback chain: frames whose code objects are registered in
# `internal_code` are dropped, and frames that belong to compiled
# templates are replaced by faked frames whose line numbers point back
# into the template source.
def translate_exception(exc_info, initial_skip=0):
"""If passed an exc_info it will automatically rewrite the exceptions
all the way down to the correct line numbers and frames.
"""
tb = exc_info[2]
frames = []
# skip some internal frames if wanted
for x in range(initial_skip):
if tb is not None:
tb = tb.tb_next
initial_tb = tb
while tb is not None:
# skip frames decorated with @internalcode. These are internal
# calls we can't avoid and that are useless in template debugging
# output.
if tb.tb_frame.f_code in internal_code:
tb = tb.tb_next
continue
# save a reference to the next frame if we override the current
# one with a faked one.
next = tb.tb_next
# fake template exceptions
template = tb.tb_frame.f_globals.get('__jinja_template__')
if template is not None:
lineno = template.get_corresponding_lineno(tb.tb_lineno)
tb = fake_exc_info(exc_info[:2] + (tb,), template.filename,
lineno)[2]
frames.append(make_frame_proxy(tb))
tb = next
# if we don't have any exceptions in the frames left, we have to
# reraise it unchanged.
# XXX: can we backup here? when could this happen?
if not frames:
reraise(exc_info[0], exc_info[1], exc_info[2])
return ProcessedTraceback(exc_info[0], exc_info[1], frames)
def get_jinja_locals(real_locals):
    """Extract the template-level variables from a generated frame's
    locals dict.

    The generated code stores template variables as ``l_<depth>_<name>``;
    for each name the value from the deepest scope wins.  Values equal to
    ``missing`` remove the name from the result.
    """
    ctx = real_locals.get('context')
    locals = ctx.get_all().copy() if ctx else {}

    local_overrides = {}
    for name, value in iteritems(real_locals):
        if not name.startswith('l_') or value is missing:
            continue
        try:
            _, depth, name = name.split('_', 2)
            depth = int(depth)
        except ValueError:
            # malformed local name -- not one of ours
            continue
        if depth > local_overrides.get(name, (-1,))[0]:
            local_overrides[name] = (depth, value)

    for name, (_, value) in iteritems(local_overrides):
        if value is missing:
            locals.pop(name, None)
        else:
            locals[name] = value
    return locals
def fake_exc_info(exc_info, filename, lineno):
    """Helper for `translate_exception`.

    Re-raises the exception from a freshly compiled fake code object so
    that the resulting traceback points at ``filename:lineno`` instead of
    the generated module code, carrying the template locals along.
    """
    exc_type, exc_value, tb = exc_info

    # figure the real context out
    if tb is not None:
        locals = get_jinja_locals(tb.tb_frame.f_locals)

        # if there is a local called __jinja_exception__, we get
        # rid of it to not break the debug functionality.
        locals.pop('__jinja_exception__', None)
    else:
        locals = {}

    # assemble fake globals we need
    globals = {
        '__name__': filename,
        '__file__': filename,
        '__jinja_exception__': exc_info[:2],

        # we don't want to keep the reference to the template around
        # to not cause circular dependencies, but we mark it as Jinja
        # frame for the ProcessedTraceback
        '__jinja_template__': None
    }

    # and fake the exception; the newline padding moves the raise onto
    # the desired line number
    code = compile('\n' * (lineno - 1) + raise_helper, filename, 'exec')

    # if it's possible, change the name of the code.  This won't work
    # on some python environments such as google appengine
    try:
        if tb is None:
            location = 'template'
        else:
            function = tb.tb_frame.f_code.co_name
            if function == 'root':
                location = 'top-level template code'
            elif function.startswith('block_'):
                location = 'block "%s"' % function[6:]
            else:
                location = 'template'

        if hasattr(code, 'replace'):
            # bug fix: Python 3.8+ added fields to CodeType (e.g.
            # posonlyargcount), so the positional constructor below
            # misassigns arguments there; use the supported API instead
            code = code.replace(co_name=location)
        elif PY2:
            code = CodeType(0, code.co_nlocals, code.co_stacksize,
                            code.co_flags, code.co_code, code.co_consts,
                            code.co_names, code.co_varnames, filename,
                            location, code.co_firstlineno,
                            code.co_lnotab, (), ())
        else:
            code = CodeType(0, code.co_kwonlyargcount,
                            code.co_nlocals, code.co_stacksize,
                            code.co_flags, code.co_code, code.co_consts,
                            code.co_names, code.co_varnames, filename,
                            location, code.co_firstlineno,
                            code.co_lnotab, (), ())
    except Exception:
        # renaming the code object is purely cosmetic -- ignore failures
        pass

    # execute the code and catch the new traceback
    try:
        exec(code, globals, locals)
    except:
        exc_info = sys.exc_info()
        new_tb = exc_info[2].tb_next

    # return without this frame
    return exc_info[:2] + (new_tb,)
def _init_ugly_crap():
"""This function implements a few ugly things so that we can patch the
traceback objects. The function returned allows resetting `tb_next` on
any python traceback object. Do not attempt to use this on non cpython
interpreters
"""
import ctypes
from types import TracebackType
if PY2:
# figure out size of _Py_ssize_t for Python 2:
if hasattr(ctypes.pythonapi, 'Py_InitModule4_64'):
_Py_ssize_t = ctypes.c_int64
else:
_Py_ssize_t = ctypes.c_int
else:
# platform ssize_t on Python 3
_Py_ssize_t = ctypes.c_ssize_t
# regular python
# mirror of CPython's PyObject header (refcount + type pointer)
class _PyObject(ctypes.Structure):
pass
_PyObject._fields_ = [
('ob_refcnt', _Py_ssize_t),
('ob_type', ctypes.POINTER(_PyObject))
]
# python with trace
# Py_TRACE_REFS builds prepend two link pointers to every object header
if hasattr(sys, 'getobjects'):
class _PyObject(ctypes.Structure):
pass
_PyObject._fields_ = [
('_ob_next', ctypes.POINTER(_PyObject)),
('_ob_prev', ctypes.POINTER(_PyObject)),
('ob_refcnt', _Py_ssize_t),
('ob_type', ctypes.POINTER(_PyObject))
]
# ctypes view of the traceback object so tb_next can be written directly
class _Traceback(_PyObject):
pass
_Traceback._fields_ = [
('tb_next', ctypes.POINTER(_Traceback)),
('tb_frame', ctypes.POINTER(_PyObject)),
('tb_lasti', ctypes.c_int),
('tb_lineno', ctypes.c_int)
]
def tb_set_next(tb, next):
"""Set the tb_next attribute of a traceback object."""
if not (isinstance(tb, TracebackType) and
(next is None or isinstance(next, TracebackType))):
raise TypeError('tb_set_next arguments must be traceback objects')
obj = _Traceback.from_address(id(tb))
if tb.tb_next is not None:
# we drop the reference to the old tb_next -- fix its refcount
old = _Traceback.from_address(id(tb.tb_next))
old.ob_refcnt -= 1
if next is None:
obj.tb_next = ctypes.POINTER(_Traceback)()
else:
# the struct takes a new reference to `next`
next = _Traceback.from_address(id(next))
next.ob_refcnt += 1
obj.tb_next = ctypes.pointer(next)
return tb_set_next
# try to get a tb_set_next implementation if we don't have transparent
# proxies.
tb_set_next = None
if tproxy is None:
try:
tb_set_next = _init_ugly_crap()
except:
# purely best effort: any failure leaves tb_set_next disabled
pass
del _init_ugly_crap
# -*- coding: utf-8 -*- | |
""" | |
jinja2.defaults | |
~~~~~~~~~~~~~~~ | |
Jinja default filters and tags. | |
:copyright: (c) 2017 by the Jinja Team. | |
:license: BSD, see LICENSE for more details. | |
""" | |
from jinja2._compat import range_type | |
from jinja2.utils import generate_lorem_ipsum, Cycler, Joiner, Namespace | |
# defaults for the parser / lexer
BLOCK_START_STRING = '{%'
BLOCK_END_STRING = '%}'
VARIABLE_START_STRING = '{{'
VARIABLE_END_STRING = '}}'
COMMENT_START_STRING = '{#'
COMMENT_END_STRING = '#}'
LINE_STATEMENT_PREFIX = None
LINE_COMMENT_PREFIX = None
TRIM_BLOCKS = False
LSTRIP_BLOCKS = False
NEWLINE_SEQUENCE = '\n'
KEEP_TRAILING_NEWLINE = False
# default filters, tests and namespace
from jinja2.filters import FILTERS as DEFAULT_FILTERS
from jinja2.tests import TESTS as DEFAULT_TESTS
DEFAULT_NAMESPACE = {
'range': range_type,
'dict': dict,
'lipsum': generate_lorem_ipsum,
'cycler': Cycler,
'joiner': Joiner,
'namespace': Namespace
}
# default policies
DEFAULT_POLICIES = {
'compiler.ascii_str': True,
'urlize.rel': 'noopener',
'urlize.target': None,
'truncate.leeway': 5,
'json.dumps_function': None,
'json.dumps_kwargs': {'sort_keys': True},
'ext.i18n.trimmed': False,
}
# export all constants
# (only the UPPERCASE module-level names are part of the public surface)
__all__ = tuple(x for x in locals().keys() if x.isupper())
# -*- coding: utf-8 -*- | |
""" | |
jinja2.environment | |
~~~~~~~~~~~~~~~~~~ | |
Provides a class that holds runtime and parsing time options. | |
:copyright: (c) 2017 by the Jinja Team. | |
:license: BSD, see LICENSE for more details. | |
""" | |
import os | |
import sys | |
import weakref | |
from functools import reduce, partial | |
from jinja2 import nodes | |
from jinja2.defaults import BLOCK_START_STRING, \ | |
BLOCK_END_STRING, VARIABLE_START_STRING, VARIABLE_END_STRING, \ | |
COMMENT_START_STRING, COMMENT_END_STRING, LINE_STATEMENT_PREFIX, \ | |
LINE_COMMENT_PREFIX, TRIM_BLOCKS, NEWLINE_SEQUENCE, \ | |
DEFAULT_FILTERS, DEFAULT_TESTS, DEFAULT_NAMESPACE, \ | |
DEFAULT_POLICIES, KEEP_TRAILING_NEWLINE, LSTRIP_BLOCKS | |
from jinja2.lexer import get_lexer, TokenStream | |
from jinja2.parser import Parser | |
from jinja2.nodes import EvalContext | |
from jinja2.compiler import generate, CodeGenerator | |
from jinja2.runtime import Undefined, new_context, Context | |
from jinja2.exceptions import TemplateSyntaxError, TemplateNotFound, \ | |
TemplatesNotFound, TemplateRuntimeError | |
from jinja2.utils import import_string, LRUCache, Markup, missing, \ | |
concat, consume, internalcode, have_async_gen | |
from jinja2._compat import imap, ifilter, string_types, iteritems, \ | |
text_type, reraise, implements_iterator, implements_to_string, \ | |
encode_filename, PY2, PYPY | |
# for direct template usage we have up to ten living environments,
# cached by the full tuple of constructor arguments
_spontaneous_environments = LRUCache(10)
# the function to create jinja traceback objects. This is dynamically
# imported on the first exception in the exception handler.
_make_traceback = None
def get_spontaneous_environment(*args):
    """Return a new spontaneous environment.  A spontaneous environment
    is an unnamed and (in theory) inaccessible environment used for
    templates created from strings rather than the file system.
    """
    try:
        env = _spontaneous_environments.get(args)
    except TypeError:
        # unhashable constructor arguments cannot be cached
        return Environment(*args)
    if env is not None:
        return env
    env = Environment(*args)
    env.shared = True
    _spontaneous_environments[args] = env
    return env
def create_cache(size):
    """Return the cache object appropriate for ``size``.

    ``0`` disables caching (``None``), a negative size yields an
    unbounded dict cache, and a positive size an LRU cache of that
    capacity.
    """
    if size < 0:
        return {}
    return None if size == 0 else LRUCache(size)
def copy_cache(cache):
    """Return a fresh, empty cache of the same kind as ``cache``."""
    if cache is None:
        return None
    return {} if type(cache) is dict else LRUCache(cache.capacity)
def load_extensions(environment, extensions):
    """Instantiate every extension in *extensions* (import-path strings
    are resolved to classes first) and bind it to *environment*.

    Returns a dict mapping each extension's identifier to its instance.
    """
    result = {}
    for extension in extensions:
        cls = extension
        if isinstance(cls, string_types):
            cls = import_string(cls)
        result[cls.identifier] = cls(environment)
    return result
# Raise TemplateRuntimeError about a missing filter or test.  When the
# requested name is itself an undefined value, the undefined error's
# message is appended as a hint (a common mistake is passing the
# callable unquoted so it resolves to undefined).
def fail_for_missing_callable(string, name):
msg = string % name
if isinstance(name, Undefined):
try:
name._fail_with_undefined_error()
except Exception as e:
msg = '%s (%s; did you forget to quote the callable name?)' % (msg, e)
raise TemplateRuntimeError(msg)
def _environment_sanity_check(environment):
    """Perform a sanity check on the environment.

    Asserts that the undefined type is usable, that the three start
    delimiters are pairwise distinct, and that the newline sequence is a
    known line ending.  Returns the environment unchanged.
    """
    assert issubclass(environment.undefined, Undefined), 'undefined must ' \
        'be a subclass of undefined because filters depend on it.'
    # bug fix: the old chained comparison (a != b != c) never compared
    # the block and comment start strings with each other, so e.g.
    # block_start_string == comment_start_string slipped through even
    # though the message promises all three differ.
    start_strings = {
        environment.block_start_string,
        environment.variable_start_string,
        environment.comment_start_string,
    }
    assert len(start_strings) == 3, 'block, variable and comment ' \
        'start strings must be different'
    assert environment.newline_sequence in ('\r', '\r\n', '\n'), \
        'newline_sequence set to unknown line ending string.'
    return environment
class Environment(object): | |
r"""The core component of Jinja is the `Environment`. It contains | |
important shared variables like configuration, filters, tests, | |
globals and others. Instances of this class may be modified if | |
they are not shared and if no template was loaded so far. | |
Modifications on environments after the first template was loaded | |
will lead to surprising effects and undefined behavior. | |
Here are the possible initialization parameters: | |
`block_start_string` | |
The string marking the beginning of a block. Defaults to ``'{%'``. | |
`block_end_string` | |
The string marking the end of a block. Defaults to ``'%}'``. | |
`variable_start_string` | |
The string marking the beginning of a print statement. | |
Defaults to ``'{{'``. | |
`variable_end_string` | |
The string marking the end of a print statement. Defaults to | |
``'}}'``. | |
`comment_start_string` | |
The string marking the beginning of a comment. Defaults to ``'{#'``. | |
`comment_end_string` | |
The string marking the end of a comment. Defaults to ``'#}'``. | |
`line_statement_prefix` | |
If given and a string, this will be used as prefix for line based | |
statements. See also :ref:`line-statements`. | |
`line_comment_prefix` | |
If given and a string, this will be used as prefix for line based | |
comments. See also :ref:`line-statements`. | |
.. versionadded:: 2.2 | |
`trim_blocks` | |
If this is set to ``True`` the first newline after a block is | |
removed (block, not variable tag!). Defaults to `False`. | |
`lstrip_blocks` | |
If this is set to ``True`` leading spaces and tabs are stripped | |
from the start of a line to a block. Defaults to `False`. | |
`newline_sequence` | |
The sequence that starts a newline. Must be one of ``'\r'``, | |
``'\n'`` or ``'\r\n'``. The default is ``'\n'`` which is a | |
useful default for Linux and OS X systems as well as web | |
applications. | |
`keep_trailing_newline` | |
Preserve the trailing newline when rendering templates. | |
The default is ``False``, which causes a single newline, | |
if present, to be stripped from the end of the template. | |
.. versionadded:: 2.7 | |
`extensions` | |
List of Jinja extensions to use. This can either be import paths | |
as strings or extension classes. For more information have a | |
look at :ref:`the extensions documentation <jinja-extensions>`. | |
`optimized` | |
should the optimizer be enabled? Default is ``True``. | |
`undefined` | |
:class:`Undefined` or a subclass of it that is used to represent | |
undefined values in the template. | |
`finalize` | |
A callable that can be used to process the result of a variable | |
expression before it is output. For example one can convert | |
``None`` implicitly into an empty string here. | |
`autoescape` | |
If set to ``True`` the XML/HTML autoescaping feature is enabled by | |
default. For more details about autoescaping see | |
:class:`~jinja2.utils.Markup`. As of Jinja 2.4 this can also | |
be a callable that is passed the template name and has to | |
return ``True`` or ``False`` depending on autoescape should be | |
enabled by default. | |
.. versionchanged:: 2.4 | |
`autoescape` can now be a function | |
`loader` | |
The template loader for this environment. | |
`cache_size` | |
The size of the cache. Per default this is ``400`` which means | |
that if more than 400 templates are loaded the loader will clean | |
out the least recently used template. If the cache size is set to | |
``0`` templates are recompiled all the time, if the cache size is | |
``-1`` the cache will not be cleaned. | |
.. versionchanged:: 2.8 | |
The cache size was increased to 400 from a low 50. | |
`auto_reload` | |
Some loaders load templates from locations where the template | |
sources may change (ie: file system or database). If | |
``auto_reload`` is set to ``True`` (default) every time a template is | |
requested the loader checks if the source changed and if yes, it | |
will reload the template. For higher performance it's possible to | |
disable that. | |
`bytecode_cache` | |
If set to a bytecode cache object, this object will provide a | |
cache for the internal Jinja bytecode so that templates don't | |
have to be parsed if they were not changed. | |
See :ref:`bytecode-cache` for more information. | |
`enable_async` | |
If set to true this enables async template execution which allows | |
you to take advantage of newer Python features. This requires | |
Python 3.6 or later. | |
""" | |
#: if this environment is sandboxed. Modifying this variable won't make
#: the environment sandboxed though. For a real sandboxed environment
#: have a look at jinja2.sandbox. This flag alone controls the code
#: generation by the compiler.
sandboxed = False
#: True if the environment is just an overlay
overlayed = False
#: the environment this environment is linked to if it is an overlay
linked_to = None
#: shared environments have this set to `True`. A shared environment
#: must not be modified
shared = False
#: these are currently EXPERIMENTAL undocumented features.
exception_handler = None
exception_formatter = None
#: the class that is used for code generation. See
#: :class:`~jinja2.compiler.CodeGenerator` for more information.
code_generator_class = CodeGenerator
#: the context class that is used for templates. See
#: :class:`~jinja2.runtime.Context` for more information.
context_class = Context
# See the class docstring for the documentation of every parameter.
def __init__(self,
block_start_string=BLOCK_START_STRING,
block_end_string=BLOCK_END_STRING,
variable_start_string=VARIABLE_START_STRING,
variable_end_string=VARIABLE_END_STRING,
comment_start_string=COMMENT_START_STRING,
comment_end_string=COMMENT_END_STRING,
line_statement_prefix=LINE_STATEMENT_PREFIX,
line_comment_prefix=LINE_COMMENT_PREFIX,
trim_blocks=TRIM_BLOCKS,
lstrip_blocks=LSTRIP_BLOCKS,
newline_sequence=NEWLINE_SEQUENCE,
keep_trailing_newline=KEEP_TRAILING_NEWLINE,
extensions=(),
optimized=True,
undefined=Undefined,
finalize=None,
autoescape=False,
loader=None,
cache_size=400,
auto_reload=True,
bytecode_cache=None,
enable_async=False):
# !!Important notice!!
# The constructor accepts quite a few arguments that should be
# passed by keyword rather than position. However it's important to
# not change the order of arguments because it's used at least
# internally in those cases:
# - spontaneous environments (i18n extension and Template)
# - unittests
# If parameter changes are required only add parameters at the end
# and don't change the arguments (or the defaults!) of the arguments
# existing already.
# lexer / parser information
self.block_start_string = block_start_string
self.block_end_string = block_end_string
self.variable_start_string = variable_start_string
self.variable_end_string = variable_end_string
self.comment_start_string = comment_start_string
self.comment_end_string = comment_end_string
self.line_statement_prefix = line_statement_prefix
self.line_comment_prefix = line_comment_prefix
self.trim_blocks = trim_blocks
self.lstrip_blocks = lstrip_blocks
self.newline_sequence = newline_sequence
self.keep_trailing_newline = keep_trailing_newline
# runtime information
self.undefined = undefined
self.optimized = optimized
self.finalize = finalize
self.autoescape = autoescape
# defaults -- copies, so per-environment mutation never touches the
# shared default dicts
self.filters = DEFAULT_FILTERS.copy()
self.tests = DEFAULT_TESTS.copy()
self.globals = DEFAULT_NAMESPACE.copy()
# set the loader provided
self.loader = loader
self.cache = create_cache(cache_size)
self.bytecode_cache = bytecode_cache
self.auto_reload = auto_reload
# configurable policies
self.policies = DEFAULT_POLICIES.copy()
# load extensions
self.extensions = load_extensions(self, extensions)
self.enable_async = enable_async
# effective async mode additionally requires async-generator support
# in the running interpreter (have_async_gen)
self.is_async = self.enable_async and have_async_gen
_environment_sanity_check(self)
# Register one additional extension on an already-created environment.
def add_extension(self, extension):
"""Adds an extension after the environment was created.
.. versionadded:: 2.5
"""
self.extensions.update(load_extensions(self, [extension]))
def extend(self, **attributes):
    """Add the items to the instance of the environment if they do not
    exist yet.  This is used by :ref:`extensions <writing-extensions>`
    to register callbacks and configuration values without breaking
    inheritance.
    """
    for key, value in iteritems(attributes):
        if hasattr(self, key):
            # never clobber an attribute that already exists
            continue
        setattr(self, key, value)
def overlay(self, block_start_string=missing, block_end_string=missing,
variable_start_string=missing, variable_end_string=missing,
comment_start_string=missing, comment_end_string=missing,
line_statement_prefix=missing, line_comment_prefix=missing,
trim_blocks=missing, lstrip_blocks=missing,
extensions=missing, optimized=missing,
undefined=missing, finalize=missing, autoescape=missing,
loader=missing, cache_size=missing, auto_reload=missing,
bytecode_cache=missing):
"""Create a new overlay environment that shares all the data with the
current environment except for cache and the overridden attributes.
Extensions cannot be removed for an overlayed environment. An overlayed
environment automatically gets all the extensions of the environment it
is linked to plus optional extra extensions.
Creating overlays should happen after the initial environment was set
up completely. Not all attributes are truly linked, some are just
copied over so modifications on the original environment may not shine
through.
"""
# capture every parameter by name via locals(); do not introduce any
# other local variable above this line or it would leak into the overlay
args = dict(locals())
del args['self'], args['cache_size'], args['extensions']
rv = object.__new__(self.__class__)
rv.__dict__.update(self.__dict__)
rv.overlayed = True
rv.linked_to = self
# apply only the parameters that were explicitly overridden
for key, value in iteritems(args):
if value is not missing:
setattr(rv, key, value)
if cache_size is not missing:
rv.cache = create_cache(cache_size)
else:
rv.cache = copy_cache(self.cache)
# rebind every extension to the new environment
rv.extensions = {}
for key, value in iteritems(self.extensions):
rv.extensions[key] = value.bind(rv)
if extensions is not missing:
rv.extensions.update(load_extensions(rv, extensions))
return _environment_sanity_check(rv)
#: read-only property delegating to ``get_lexer`` (see jinja2.lexer)
lexer = property(get_lexer, doc="The lexer for this environment.")
def iter_extensions(self):
    """Iterates over the extensions, ordered by ascending priority."""
    ordered = sorted(self.extensions.values(),
                     key=lambda ext: ext.priority)
    return iter(ordered)
def getitem(self, obj, argument):
    """Get an item or attribute of an object but prefer the item.

    Falls back to attribute access for string arguments and finally
    returns an undefined object when both lookups fail.
    """
    try:
        return obj[argument]
    except (AttributeError, TypeError, LookupError):
        pass
    if isinstance(argument, string_types):
        try:
            attr = str(argument)
        except Exception:
            pass
        else:
            try:
                return getattr(obj, attr)
            except AttributeError:
                pass
    return self.undefined(obj=obj, name=argument)
def getattr(self, obj, attribute):
"""Get an item or attribute of an object but prefer the attribute.
Unlike :meth:`getitem` the attribute *must* be a bytestring.
"""
try:
return getattr(obj, attribute)
except AttributeError:
pass
# fall back to item lookup before giving up with an undefined object
try:
return obj[attribute]
except (TypeError, LookupError, AttributeError):
return self.undefined(obj=obj, name=attribute)
def call_filter(self, name, value, args=None, kwargs=None,
context=None, eval_ctx=None):
"""Invokes a filter on a value the same way the compiler does it.
Note that on Python 3 this might return a coroutine in case the
filter is running from an environment in async mode and the filter
supports async execution. It's your responsibility to await this
if needed.
.. versionadded:: 2.7
"""
func = self.filters.get(name)
if func is None:
fail_for_missing_callable('no filter named %r', name)
args = [value] + list(args or ())
# context / eval-context / environment filters receive one extra
# leading argument, mirroring the docstring's "same way the compiler
# does it" contract
if getattr(func, 'contextfilter', False):
if context is None:
raise TemplateRuntimeError('Attempted to invoke context '
'filter without context')
args.insert(0, context)
elif getattr(func, 'evalcontextfilter', False):
if eval_ctx is None:
if context is not None:
eval_ctx = context.eval_ctx
else:
eval_ctx = EvalContext(self)
args.insert(0, eval_ctx)
elif getattr(func, 'environmentfilter', False):
args.insert(0, self)
return func(*args, **(kwargs or {}))
def call_test(self, name, value, args=None, kwargs=None):
    """Invokes a test on a value the same way the compiler does it.

    .. versionadded:: 2.7
    """
    func = self.tests.get(name)
    if func is None:
        fail_for_missing_callable('no test named %r', name)
    pos_args = tuple(args or ())
    kw_args = kwargs or {}
    return func(value, *pos_args, **kw_args)
    @internalcode
    def parse(self, source, name=None, filename=None):
        """Parse the sourcecode and return the abstract syntax tree. This
        tree of nodes is used by the compiler to convert the template into
        executable source- or bytecode. This is useful for debugging or to
        extract information from templates.
        If you are :ref:`developing Jinja2 extensions <writing-extensions>`
        this gives you a good overview of the node tree generated.
        """
        try:
            return self._parse(source, name, filename)
        except TemplateSyntaxError:
            # handle_exception rewrites and re-raises the error (see
            # handle_exception below), so no explicit return is needed.
            exc_info = sys.exc_info()
            self.handle_exception(exc_info, source_hint=source)
    def _parse(self, source, name, filename):
        """Internal parsing function used by `parse` and `compile`."""
        # NOTE(review): encode_filename appears to be a py2/py3 compat
        # helper that normalizes the filename type -- confirm in _compat.
        return Parser(self, source, name, encode_filename(filename)).parse()
    def lex(self, source, name=None, filename=None):
        """Lex the given sourcecode and return a generator that yields
        tokens as tuples in the form ``(lineno, token_type, value)``.
        This can be useful for :ref:`extension development <writing-extensions>`
        and debugging templates.
        This does not perform preprocessing. If you want the preprocessing
        of the extensions to be applied you have to filter source through
        the :meth:`preprocess` method.
        """
        # Coerce to text first so the lexer always sees unicode input.
        source = text_type(source)
        try:
            return self.lexer.tokeniter(source, name, filename)
        except TemplateSyntaxError:
            # Rewritten and re-raised by handle_exception.
            exc_info = sys.exc_info()
            self.handle_exception(exc_info, source_hint=source)
def preprocess(self, source, name=None, filename=None): | |
"""Preprocesses the source with all extensions. This is automatically | |
called for all parsing and compiling methods but *not* for :meth:`lex` | |
because there you usually only want the actual source tokenized. | |
""" | |
return reduce(lambda s, e: e.preprocess(s, name, filename), | |
self.iter_extensions(), text_type(source)) | |
    def _tokenize(self, source, name, filename=None, state=None):
        """Called by the parser to do the preprocessing and filtering
        for all the extensions. Returns a :class:`~jinja2.lexer.TokenStream`.
        """
        source = self.preprocess(source, name, filename)
        stream = self.lexer.tokenize(source, name, filename, state)
        # Extensions may replace the stream with an arbitrary iterable of
        # tokens; re-wrap it so downstream code always sees a TokenStream.
        for ext in self.iter_extensions():
            stream = ext.filter_stream(stream)
            if not isinstance(stream, TokenStream):
                stream = TokenStream(stream, name, filename)
        return stream
    def _generate(self, source, name, filename, defer_init=False):
        """Internal hook that can be overridden to hook a different generate
        method in.
        .. versionadded:: 2.5
        """
        # Turns the node tree into python source code (the result is later
        # fed to the builtin ``compile`` by ``_compile``).
        return generate(source, self, name, filename, defer_init=defer_init,
                        optimized=self.optimized)
    def _compile(self, source, filename):
        """Internal hook that can be overridden to hook a different compile
        method in.
        .. versionadded:: 2.5
        """
        # ``compile`` resolves to the builtin here, not the ``compile``
        # method of this class (class attributes are not in method scope).
        return compile(source, filename, 'exec')
    @internalcode
    def compile(self, source, name=None, filename=None, raw=False,
                defer_init=False):
        """Compile a node or template source code. The `name` parameter is
        the load name of the template after it was joined using
        :meth:`join_path` if necessary, not the filename on the file system.
        the `filename` parameter is the estimated filename of the template on
        the file system. If the template came from a database or memory this
        can be omitted.
        The return value of this method is a python code object. If the `raw`
        parameter is `True` the return value will be a string with python
        code equivalent to the bytecode returned otherwise. This method is
        mainly used internally.
        `defer_init` is use internally to aid the module code generator. This
        causes the generated code to be able to import without the global
        environment variable to be set.
        .. versionadded:: 2.4
           `defer_init` parameter added.
        """
        source_hint = None
        try:
            # A string is parsed first; a node tree is accepted directly.
            if isinstance(source, string_types):
                source_hint = source
                source = self._parse(source, name, filename)
            # The node tree is turned into python source code.
            source = self._generate(source, name, filename,
                                    defer_init=defer_init)
            if raw:
                return source
            if filename is None:
                filename = '<template>'
            else:
                filename = encode_filename(filename)
            return self._compile(source, filename)
        except TemplateSyntaxError:
            # Only available as a hint when the input was a string.
            exc_info = sys.exc_info()
            self.handle_exception(exc_info, source_hint=source_hint)
    def compile_expression(self, source, undefined_to_none=True):
        """A handy helper method that returns a callable that accepts keyword
        arguments that appear as variables in the expression. If called it
        returns the result of the expression.
        This is useful if applications want to use the same rules as Jinja
        in template "configuration files" or similar situations.
        Example usage:
        >>> env = Environment()
        >>> expr = env.compile_expression('foo == 42')
        >>> expr(foo=23)
        False
        >>> expr(foo=42)
        True
        Per default the return value is converted to `None` if the
        expression returns an undefined value. This can be changed
        by setting `undefined_to_none` to `False`.
        >>> env.compile_expression('var')() is None
        True
        >>> env.compile_expression('var', undefined_to_none=False)()
        Undefined
        .. versionadded:: 2.1
        """
        # Parse in 'variable' state so the source is treated like the body
        # of a {{ ... }} expression.
        parser = Parser(self, source, state='variable')
        exc_info = None
        try:
            expr = parser.parse_expression()
            if not parser.stream.eos:
                raise TemplateSyntaxError('chunk after expression',
                                          parser.stream.current.lineno,
                                          None, None)
            expr.set_environment(self)
        except TemplateSyntaxError:
            exc_info = sys.exc_info()
        # Handled outside the except block; handle_exception re-raises.
        if exc_info is not None:
            self.handle_exception(exc_info, source_hint=source)
        # Wrap the expression in a template that assigns its value to the
        # ``result`` variable so TemplateExpression can read it back out
        # of the rendered context.
        body = [nodes.Assign(nodes.Name('result', 'store'), expr, lineno=1)]
        template = self.from_string(nodes.Template(body, lineno=1))
        return TemplateExpression(template, undefined_to_none)
    def compile_templates(self, target, extensions=None, filter_func=None,
                          zip='deflated', log_function=None,
                          ignore_errors=True, py_compile=False):
        """Finds all the templates the loader can find, compiles them
        and stores them in `target`.  If `zip` is `None`, instead of in a
        zipfile, the templates will be stored in a directory.
        By default a deflate zip algorithm is used. To switch to
        the stored algorithm, `zip` can be set to ``'stored'``.
        `extensions` and `filter_func` are passed to :meth:`list_templates`.
        Each template returned will be compiled to the target folder or
        zipfile.
        By default template compilation errors are ignored. In case a
        log function is provided, errors are logged. If you want template
        syntax errors to abort the compilation you can set `ignore_errors`
        to `False` and you will get an exception on syntax errors.
        If `py_compile` is set to `True` .pyc files will be written to the
        target instead of standard .py files. This flag does not do anything
        on pypy and Python 3 where pyc files are not picked up by itself and
        don't give much benefit.
        .. versionadded:: 2.4
        """
        from jinja2.loaders import ModuleLoader
        if log_function is None:
            log_function = lambda x: None
        if py_compile:
            # Byte-compilation is a py2/cpython only feature; silently
            # downgraded (with a warning) everywhere else.
            if not PY2 or PYPY:
                from warnings import warn
                warn(Warning('py_compile has no effect on pypy or Python 3'))
                py_compile = False
            else:
                import imp
                import marshal
                py_header = imp.get_magic() + \
                    u'\xff\xff\xff\xff'.encode('iso-8859-15')
                # Python 3.3 added a source filesize to the header
                if sys.version_info >= (3, 3):
                    py_header += u'\x00\x00\x00\x00'.encode('iso-8859-15')
        # Closure over ``zip``, ``zip_file`` and ``target``; ``zip_file``
        # is assigned below before the first call.
        def write_file(filename, data, mode):
            if zip:
                info = ZipInfo(filename)
                info.external_attr = 0o755 << 16
                zip_file.writestr(info, data)
            else:
                f = open(os.path.join(target, filename), mode)
                try:
                    f.write(data)
                finally:
                    f.close()
        if zip is not None:
            from zipfile import ZipFile, ZipInfo, ZIP_DEFLATED, ZIP_STORED
            zip_file = ZipFile(target, 'w', dict(deflated=ZIP_DEFLATED,
                                                 stored=ZIP_STORED)[zip])
            log_function('Compiling into Zip archive "%s"' % target)
        else:
            if not os.path.isdir(target):
                os.makedirs(target)
            log_function('Compiling into folder "%s"' % target)
        try:
            for name in self.list_templates(extensions, filter_func):
                source, filename, _ = self.loader.get_source(self, name)
                try:
                    # raw=True, defer_init=True -> python source, importable
                    # without a pre-set global environment.
                    code = self.compile(source, name, filename, True, True)
                except TemplateSyntaxError as e:
                    if not ignore_errors:
                        raise
                    log_function('Could not compile "%s": %s' % (name, e))
                    continue
                filename = ModuleLoader.get_module_filename(name)
                if py_compile:
                    c = self._compile(code, encode_filename(filename))
                    write_file(filename + 'c', py_header +
                               marshal.dumps(c), 'wb')
                    log_function('Byte-compiled "%s" as %s' %
                                 (name, filename + 'c'))
                else:
                    write_file(filename, code, 'w')
                    log_function('Compiled "%s" as %s' % (name, filename))
        finally:
            # The zip archive is closed even when a compile error aborted
            # the loop above.
            if zip:
                zip_file.close()
        log_function('Finished compiling templates')
def list_templates(self, extensions=None, filter_func=None): | |
"""Returns a list of templates for this environment. This requires | |
that the loader supports the loader's | |
:meth:`~BaseLoader.list_templates` method. | |
If there are other files in the template folder besides the | |
actual templates, the returned list can be filtered. There are two | |
ways: either `extensions` is set to a list of file extensions for | |
templates, or a `filter_func` can be provided which is a callable that | |
is passed a template name and should return `True` if it should end up | |
in the result list. | |
If the loader does not support that, a :exc:`TypeError` is raised. | |
.. versionadded:: 2.4 | |
""" | |
x = self.loader.list_templates() | |
if extensions is not None: | |
if filter_func is not None: | |
raise TypeError('either extensions or filter_func ' | |
'can be passed, but not both') | |
filter_func = lambda x: '.' in x and \ | |
x.rsplit('.', 1)[1] in extensions | |
if filter_func is not None: | |
x = list(ifilter(filter_func, x)) | |
return x | |
    def handle_exception(self, exc_info=None, rendered=False, source_hint=None):
        """Exception handling helper. This is used internally to either raise
        rewritten exceptions or return a rendered traceback for the template.
        """
        global _make_traceback
        if exc_info is None:
            exc_info = sys.exc_info()
        # the debugging module is imported when it's used for the first time.
        # we're doing a lot of stuff there and for applications that do not
        # get any exceptions in template rendering there is no need to load
        # all of that.
        if _make_traceback is None:
            from jinja2.debug import make_traceback as _make_traceback
        traceback = _make_traceback(exc_info, source_hint)
        # A configured formatter short-circuits only for rendered output.
        if rendered and self.exception_formatter is not None:
            return self.exception_formatter(traceback)
        if self.exception_handler is not None:
            self.exception_handler(traceback)
        exc_type, exc_value, tb = traceback.standard_exc_info
        # Re-raised even after a handler ran -- the handler only observes.
        reraise(exc_type, exc_value, tb)
def join_path(self, template, parent): | |
"""Join a template with the parent. By default all the lookups are | |
relative to the loader root so this method returns the `template` | |
parameter unchanged, but if the paths should be relative to the | |
parent template, this function can be used to calculate the real | |
template name. | |
Subclasses may override this method and implement template path | |
joining here. | |
""" | |
return template | |
    @internalcode
    def _load_template(self, name, globals):
        """Load ``name`` via the loader, consulting the template cache."""
        if self.loader is None:
            raise TypeError('no loader for this environment specified')
        # Cached per (loader, name); the weakref keeps the cache from
        # pinning a replaced loader alive.
        cache_key = (weakref.ref(self.loader), name)
        if self.cache is not None:
            template = self.cache.get(cache_key)
            # Reuse only when auto_reload is off or the template still
            # reports itself as up to date.
            if template is not None and (not self.auto_reload or
                                         template.is_up_to_date):
                return template
        template = self.loader.load(self, name, globals)
        if self.cache is not None:
            self.cache[cache_key] = template
        return template
    @internalcode
    def get_template(self, name, parent=None, globals=None):
        """Load a template from the loader.  If a loader is configured this
        method asks the loader for the template and returns a :class:`Template`.
        If the `parent` parameter is not `None`, :meth:`join_path` is called
        to get the real template name before loading.
        The `globals` parameter can be used to provide template wide globals.
        These variables are available in the context at render time.
        If the template does not exist a :exc:`TemplateNotFound` exception is
        raised.
        .. versionchanged:: 2.4
           If `name` is a :class:`Template` object it is returned from the
           function unchanged.
        """
        if isinstance(name, Template):
            return name
        if parent is not None:
            # Give join_path a chance to make the name parent-relative.
            name = self.join_path(name, parent)
        return self._load_template(name, self.make_globals(globals))
    @internalcode
    def select_template(self, names, parent=None, globals=None):
        """Works like :meth:`get_template` but tries a number of templates
        before it fails.  If it cannot find any of the templates, it will
        raise a :exc:`TemplatesNotFound` exception.
        .. versionadded:: 2.3
        .. versionchanged:: 2.4
           If `names` contains a :class:`Template` object it is returned
           from the function unchanged.
        """
        if not names:
            raise TemplatesNotFound(message=u'Tried to select from an empty list '
                                            u'of templates.')
        globals = self.make_globals(globals)
        for name in names:
            if isinstance(name, Template):
                return name
            if parent is not None:
                name = self.join_path(name, parent)
            try:
                return self._load_template(name, globals)
            except TemplateNotFound:
                # Try the next candidate; only fail once all are exhausted.
                pass
        raise TemplatesNotFound(names)
    @internalcode
    def get_or_select_template(self, template_name_or_list,
                               parent=None, globals=None):
        """Does a typecheck and dispatches to :meth:`select_template`
        if an iterable of template names is given, otherwise to
        :meth:`get_template`.
        .. versionadded:: 2.3
        """
        if isinstance(template_name_or_list, string_types):
            return self.get_template(template_name_or_list, parent, globals)
        elif isinstance(template_name_or_list, Template):
            return template_name_or_list
        # Anything else is treated as an iterable of candidate names.
        return self.select_template(template_name_or_list, parent, globals)
    def from_string(self, source, globals=None, template_class=None):
        """Load a template from a string. This parses the source given and
        returns a :class:`Template` object.
        """
        globals = self.make_globals(globals)
        cls = template_class or self.template_class
        # ``source`` may also be a node tree; ``compile`` accepts both.
        return cls.from_code(self, self.compile(source), globals, None)
def make_globals(self, d): | |
"""Return a dict for the globals.""" | |
if not d: | |
return self.globals | |
return dict(self.globals, **d) | |
class Template(object):
    """The central template object. This class represents a compiled template
    and is used to evaluate it.
    Normally the template object is generated from an :class:`Environment` but
    it also has a constructor that makes it possible to create a template
    instance directly using the constructor. It takes the same arguments as
    the environment constructor but it's not possible to specify a loader.
    Every template object has a few methods and members that are guaranteed
    to exist. However it's important that a template object should be
    considered immutable. Modifications on the object are not supported.
    Template objects created from the constructor rather than an environment
    do have an `environment` attribute that points to a temporary environment
    that is probably shared with other templates created with the constructor
    and compatible settings.
    >>> template = Template('Hello {{ name }}!')
    >>> template.render(name='John Doe') == u'Hello John Doe!'
    True
    >>> stream = template.stream(name='John Doe')
    >>> next(stream) == u'Hello John Doe!'
    True
    >>> next(stream)
    Traceback (most recent call last):
        ...
    StopIteration
    """
    def __new__(cls, source,
                block_start_string=BLOCK_START_STRING,
                block_end_string=BLOCK_END_STRING,
                variable_start_string=VARIABLE_START_STRING,
                variable_end_string=VARIABLE_END_STRING,
                comment_start_string=COMMENT_START_STRING,
                comment_end_string=COMMENT_END_STRING,
                line_statement_prefix=LINE_STATEMENT_PREFIX,
                line_comment_prefix=LINE_COMMENT_PREFIX,
                trim_blocks=TRIM_BLOCKS,
                lstrip_blocks=LSTRIP_BLOCKS,
                newline_sequence=NEWLINE_SEQUENCE,
                keep_trailing_newline=KEEP_TRAILING_NEWLINE,
                extensions=(),
                optimized=True,
                undefined=Undefined,
                finalize=None,
                autoescape=False,
                enable_async=False):
        # Construction goes through a "spontaneous" environment built from
        # the given settings (probably shared between compatible templates,
        # see the class docstring) and then delegates to from_string.
        env = get_spontaneous_environment(
            block_start_string, block_end_string, variable_start_string,
            variable_end_string, comment_start_string, comment_end_string,
            line_statement_prefix, line_comment_prefix, trim_blocks,
            lstrip_blocks, newline_sequence, keep_trailing_newline,
            frozenset(extensions), optimized, undefined, finalize, autoescape,
            None, 0, False, None, enable_async)
        return env.from_string(source, template_class=cls)
    @classmethod
    def from_code(cls, environment, code, globals, uptodate=None):
        """Creates a template object from compiled code and the globals.  This
        is used by the loaders and environment to create a template object.
        """
        namespace = {
            'environment': environment,
            '__file__': code.co_filename
        }
        # Executing the compiled module code populates ``namespace`` with
        # 'name', 'root', 'blocks' and 'debug_info' (read below).
        exec(code, namespace)
        rv = cls._from_namespace(environment, namespace, globals)
        rv._uptodate = uptodate
        return rv
    @classmethod
    def from_module_dict(cls, environment, module_dict, globals):
        """Creates a template object from a module.  This is used by the
        module loader to create a template object.
        .. versionadded:: 2.4
        """
        return cls._from_namespace(environment, module_dict, globals)
    @classmethod
    def _from_namespace(cls, environment, namespace, globals):
        # __init__ is bypassed on purpose; all state is assigned directly.
        t = object.__new__(cls)
        t.environment = environment
        t.globals = globals
        t.name = namespace['name']
        t.filename = namespace['__file__']
        t.blocks = namespace['blocks']
        # render function and module
        t.root_render_func = namespace['root']
        t._module = None
        # debug and loader helpers
        t._debug_info = namespace['debug_info']
        t._uptodate = None
        # store the reference
        namespace['environment'] = environment
        namespace['__jinja_template__'] = t
        return t
    def render(self, *args, **kwargs):
        """This method accepts the same arguments as the `dict` constructor:
        A dict, a dict subclass or some keyword arguments.  If no arguments
        are given the context will be empty.  These two calls do the same::
            template.render(knights='that say nih')
            template.render({'knights': 'that say nih'})
        This will return the rendered template as unicode string.
        """
        vars = dict(*args, **kwargs)
        try:
            return concat(self.root_render_func(self.new_context(vars)))
        except Exception:
            # With rendered=True a configured exception formatter may
            # return output; otherwise handle_exception re-raises.
            exc_info = sys.exc_info()
            return self.environment.handle_exception(exc_info, True)
    def render_async(self, *args, **kwargs):
        """This works similar to :meth:`render` but returns a coroutine
        that when awaited returns the entire rendered template string.  This
        requires the async feature to be enabled.
        Example usage::
            await template.render_async(knights='that say nih; asynchronously')
        """
        # see asyncsupport for the actual implementation
        raise NotImplementedError('This feature is not available for this '
                                  'version of Python')
    def stream(self, *args, **kwargs):
        """Works exactly like :meth:`generate` but returns a
        :class:`TemplateStream`.
        """
        return TemplateStream(self.generate(*args, **kwargs))
    def generate(self, *args, **kwargs):
        """For very large templates it can be useful to not render the whole
        template at once but evaluate each statement after another and yield
        piece for piece.  This method basically does exactly that and returns
        a generator that yields one item after another as unicode strings.
        It accepts the same arguments as :meth:`render`.
        """
        vars = dict(*args, **kwargs)
        try:
            for event in self.root_render_func(self.new_context(vars)):
                yield event
        except Exception:
            exc_info = sys.exc_info()
        else:
            return
        # Handled outside the except block; handle_exception normally
        # re-raises, but a formatter may return rendered error output,
        # which is then yielded as the final event.
        yield self.environment.handle_exception(exc_info, True)
    def generate_async(self, *args, **kwargs):
        """An async version of :meth:`generate`.  Works very similarly but
        returns an async iterator instead.
        """
        # see asyncsupport for the actual implementation
        raise NotImplementedError('This feature is not available for this '
                                  'version of Python')
    def new_context(self, vars=None, shared=False, locals=None):
        """Create a new :class:`Context` for this template.  The vars
        provided will be passed to the template.  Per default the globals
        are added to the context.  If shared is set to `True` the data
        is passed as it to the context without adding the globals.
        `locals` can be a dict of local variables for internal usage.
        """
        return new_context(self.environment, self.name, self.blocks,
                           vars, shared, self.globals, locals)
    def make_module(self, vars=None, shared=False, locals=None):
        """This method works like the :attr:`module` attribute when called
        without arguments but it will evaluate the template on every call
        rather than caching it.  It's also possible to provide
        a dict which is then used as context.  The arguments are the same
        as for the :meth:`new_context` method.
        """
        return TemplateModule(self, self.new_context(vars, shared, locals))
    def make_module_async(self, vars=None, shared=False, locals=None):
        """As template module creation can invoke template code for
        asynchronous exections this method must be used instead of the
        normal :meth:`make_module` one.  Likewise the module attribute
        becomes unavailable in async mode.
        """
        # see asyncsupport for the actual implementation
        raise NotImplementedError('This feature is not available for this '
                                  'version of Python')
    @internalcode
    def _get_default_module(self):
        # Lazily evaluated and cached on the instance.
        if self._module is not None:
            return self._module
        self._module = rv = self.make_module()
        return rv
    @property
    def module(self):
        """The template as module.  This is used for imports in the
        template runtime but is also useful if one wants to access
        exported template variables from the Python layer:
        >>> t = Template('{% macro foo() %}42{% endmacro %}23')
        >>> str(t.module)
        '23'
        >>> t.module.foo() == u'42'
        True
        This attribute is not available if async mode is enabled.
        """
        return self._get_default_module()
    def get_corresponding_lineno(self, lineno):
        """Return the source line number of a line number in the
        generated bytecode as they are not in sync.
        """
        # debug_info pairs are scanned from the highest code line down to
        # find the closest mapping at or before ``lineno``.
        for template_line, code_line in reversed(self.debug_info):
            if code_line <= lineno:
                return template_line
        return 1
    @property
    def is_up_to_date(self):
        """If this variable is `False` there is a newer version available."""
        if self._uptodate is None:
            return True
        return self._uptodate()
    @property
    def debug_info(self):
        """The debug info mapping."""
        # Encoded by the code generator as '&'-joined 'template=code' pairs.
        return [tuple(imap(int, x.split('='))) for x in
                self._debug_info.split('&')]
    def __repr__(self):
        if self.name is None:
            name = 'memory:%x' % id(self)
        else:
            name = repr(self.name)
        return '<%s %s>' % (self.__class__.__name__, name)
@implements_to_string
class TemplateModule(object):
    """Represents an imported template.  All the exported names of the
    template are available as attributes on this object.  Additionally
    converting it into an unicode- or bytestrings renders the contents.
    """
    def __init__(self, template, context, body_stream=None):
        if body_stream is None:
            # An async environment cannot render synchronously here; the
            # async API must supply the body stream itself.
            if context.environment.is_async:
                raise RuntimeError('Async mode requires a body stream '
                                   'to be passed to a template module. Use '
                                   'the async methods of the API you are '
                                   'using.')
            body_stream = list(template.root_render_func(context))
        self._body_stream = body_stream
        # Exported template names become attributes of this module object.
        self.__dict__.update(context.get_exported())
        self.__name__ = template.name
    def __html__(self):
        return Markup(concat(self._body_stream))
    def __str__(self):
        return concat(self._body_stream)
    def __repr__(self):
        if self.__name__ is None:
            name = 'memory:%x' % id(self)
        else:
            name = repr(self.__name__)
        return '<%s %s>' % (self.__class__.__name__, name)
class TemplateExpression(object):
    """The :meth:`jinja2.Environment.compile_expression` method returns an
    instance of this object.  It encapsulates the expression-like access
    to the template with an expression it wraps.
    """
    def __init__(self, template, undefined_to_none):
        self._template = template
        self._undefined_to_none = undefined_to_none
    def __call__(self, *args, **kwargs):
        # Render the wrapped single-assignment template, then read the
        # ``result`` variable it stored into the context.
        context = self._template.new_context(dict(*args, **kwargs))
        consume(self._template.root_render_func(context))
        result = context.vars['result']
        if self._undefined_to_none and isinstance(result, Undefined):
            return None
        return result
@implements_iterator
class TemplateStream(object):
    """A template stream works pretty much like an ordinary python generator
    but it can buffer multiple items to reduce the number of total iterations.
    Per default the output is unbuffered which means that for every unbuffered
    instruction in the template one unicode string is yielded.
    If buffering is enabled with a buffer size of 5, five items are combined
    into a new unicode string.  This is mainly useful if you are streaming
    big templates to a client via WSGI which flushes after each iteration.
    """
    def __init__(self, gen):
        self._gen = gen
        # Start unbuffered: ``_next`` pulls straight from the generator.
        self.disable_buffering()
    def dump(self, fp, encoding=None, errors='strict'):
        """Dump the complete stream into a file or file-like object.
        Per default unicode strings are written, if you want to encode
        before writing specify an `encoding`.
        Example usage::
            Template('Hello {{ name }}!').stream(name='foo').dump('hello.html')
        """
        close = False
        if isinstance(fp, string_types):
            # A string is treated as a path; the file is opened in binary
            # mode, so an encoding is forced.
            if encoding is None:
                encoding = 'utf-8'
            fp = open(fp, 'wb')
            close = True
        try:
            if encoding is not None:
                iterable = (x.encode(encoding, errors) for x in self)
            else:
                iterable = self
            if hasattr(fp, 'writelines'):
                fp.writelines(iterable)
            else:
                for item in iterable:
                    fp.write(item)
        finally:
            # Only close files this method opened itself.
            if close:
                fp.close()
    def disable_buffering(self):
        """Disable the output buffering."""
        self._next = partial(next, self._gen)
        self.buffered = False
    def _buffered_generator(self, size):
        # Accumulates chunks until ``size`` non-empty ones were seen, then
        # yields them joined; a trailing partial buffer is still yielded.
        buf = []
        c_size = 0
        push = buf.append
        while 1:
            try:
                while c_size < size:
                    c = next(self._gen)
                    push(c)
                    # Only non-empty chunks count towards the buffer size.
                    if c:
                        c_size += 1
            except StopIteration:
                if not c_size:
                    return
            yield concat(buf)
            del buf[:]
            c_size = 0
    def enable_buffering(self, size=5):
        """Enable buffering.  Buffer `size` items before yielding them."""
        if size <= 1:
            raise ValueError('buffer size too small')
        self.buffered = True
        self._next = partial(next, self._buffered_generator(size))
    def __iter__(self):
        return self
    def __next__(self):
        return self._next()
# Hook in the default template class.  ``Environment`` subclasses (or
# instances) may override ``template_class`` to plug in a custom Template.
Environment.template_class = Template
# -*- coding: utf-8 -*- | |
""" | |
jinja2.exceptions | |
~~~~~~~~~~~~~~~~~ | |
Jinja exceptions. | |
:copyright: (c) 2017 by the Jinja Team. | |
:license: BSD, see LICENSE for more details. | |
""" | |
from jinja2._compat import imap, text_type, PY2, implements_to_string | |
class TemplateError(Exception):
    """Baseclass for all template errors."""
    if PY2:
        # On Python 2 the exception args hold utf-8 encoded bytes; the
        # ``message`` property decodes them back to unicode for display.
        def __init__(self, message=None):
            if message is not None:
                message = text_type(message).encode('utf-8')
            Exception.__init__(self, message)
        @property
        def message(self):
            # Returns None implicitly when no args were given.
            if self.args:
                message = self.args[0]
                if message is not None:
                    return message.decode('utf-8', 'replace')
        def __unicode__(self):
            return self.message or u''
    else:
        # On Python 3 the message is stored as-is (native str).
        def __init__(self, message=None):
            Exception.__init__(self, message)
        @property
        def message(self):
            if self.args:
                message = self.args[0]
                if message is not None:
                    return message
@implements_to_string
class TemplateNotFound(IOError, LookupError, TemplateError):
    """Raised if a template does not exist."""
    # looks weird, but removes the warning descriptor that just
    # bogusly warns us about message being deprecated
    message = None
    def __init__(self, name, message=None):
        IOError.__init__(self)
        if message is None:
            # Without an explicit message the template name is displayed.
            message = name
        self.message = message
        self.name = name
        # ``templates`` mirrors TemplatesNotFound so both exception types
        # can be handled uniformly.
        self.templates = [name]
    def __str__(self):
        return self.message
class TemplatesNotFound(TemplateNotFound):
    """Like :class:`TemplateNotFound` but raised if multiple templates
    are selected.  This is a subclass of :class:`TemplateNotFound`
    exception, so just catching the base exception will catch both.
    .. versionadded:: 2.2
    """
    def __init__(self, names=(), message=None):
        if message is None:
            message = u'none of the templates given were found: ' + \
                      u', '.join(imap(text_type, names))
        # A conditional expression instead of the ``and/or`` idiom: the old
        # ``names and names[-1] or None`` reported None as the failing name
        # whenever the last name was falsy (e.g. an empty string), not only
        # when the list itself was empty.
        TemplateNotFound.__init__(self, names[-1] if names else None, message)
        self.templates = list(names)
@implements_to_string
class TemplateSyntaxError(TemplateError):
    """Raised to tell the user that there is a problem with the template."""
    def __init__(self, message, lineno, name=None, filename=None):
        TemplateError.__init__(self, message)
        self.lineno = lineno
        self.name = name
        self.filename = filename
        # Filled in later (e.g. by the debug machinery) with the template
        # source so __str__ can show the offending line.
        self.source = None
        # this is set to True if the debug.translate_syntax_error
        # function translated the syntax error into a new traceback
        self.translated = False
    def __str__(self):
        # for translated errors we only return the message
        if self.translated:
            return self.message
        # otherwise attach some stuff
        location = 'line %d' % self.lineno
        name = self.filename or self.name
        if name:
            location = 'File "%s", %s' % (name, location)
        lines = [self.message, ' ' + location]
        # if the source is set, add the line to the output
        if self.source is not None:
            try:
                line = self.source.splitlines()[self.lineno - 1]
            except IndexError:
                line = None
            if line:
                lines.append(' ' + line.strip())
        return u'\n'.join(lines)
class TemplateAssertionError(TemplateSyntaxError):
    """Like a template syntax error, but covers cases where something in the
    template caused an error at compile time that wasn't necessarily caused
    by a syntax error.  However it's a direct subclass of
    :exc:`TemplateSyntaxError` and has the same attributes.
    """
class TemplateRuntimeError(TemplateError):
    """A generic runtime error in the template engine.  Under some situations
    Jinja may raise this exception.
    """
class UndefinedError(TemplateRuntimeError):
    """Raised if a template tries to operate on :class:`Undefined`."""
class SecurityError(TemplateRuntimeError):
    """Raised if a template tries to do something insecure if the
    sandbox is enabled.
    """
class FilterArgumentError(TemplateRuntimeError):
    """Raised when a filter is called with arguments it cannot handle."""
# -*- coding: utf-8 -*- | |
""" | |
jinja2.ext | |
~~~~~~~~~~ | |
Jinja extensions allow to add custom tags similar to the way django custom | |
tags work. By default two example extensions exist: an i18n and a cache | |
extension. | |
:copyright: (c) 2017 by the Jinja Team. | |
:license: BSD. | |
""" | |
import re | |
from jinja2 import nodes | |
from jinja2.defaults import BLOCK_START_STRING, \ | |
BLOCK_END_STRING, VARIABLE_START_STRING, VARIABLE_END_STRING, \ | |
COMMENT_START_STRING, COMMENT_END_STRING, LINE_STATEMENT_PREFIX, \ | |
LINE_COMMENT_PREFIX, TRIM_BLOCKS, NEWLINE_SEQUENCE, \ | |
KEEP_TRAILING_NEWLINE, LSTRIP_BLOCKS | |
from jinja2.environment import Environment | |
from jinja2.runtime import concat | |
from jinja2.exceptions import TemplateAssertionError, TemplateSyntaxError | |
from jinja2.utils import contextfunction, import_string, Markup | |
from jinja2._compat import with_metaclass, string_types, iteritems | |
# the only real useful gettext functions for a Jinja template. Note | |
# that ugettext must be assigned to gettext as Jinja doesn't support | |
# non unicode strings. | |
GETTEXT_FUNCTIONS = ('_', 'gettext', 'ngettext') | |
class ExtensionRegistry(type):
    """Metaclass that stamps every extension class with a unique identifier.

    The identifier is the dotted import path (``module.ClassName``) of the
    class being created, stored as the ``identifier`` class attribute.
    """
    def __new__(cls, name, bases, d):
        new_cls = super(ExtensionRegistry, cls).__new__(cls, name, bases, d)
        new_cls.identifier = '%s.%s' % (new_cls.__module__, new_cls.__name__)
        return new_cls
class Extension(with_metaclass(ExtensionRegistry, object)):
    """Base class for parser-level extensions to the Jinja template system.

    Extensions are bound to an environment but must not keep
    environment-specific state on ``self``: an extension can be re-bound to
    a different environment (for overlays) by copying it and reassigning
    the `environment` attribute.

    Because the environment instantiates extensions itself, they cannot
    accept constructor arguments, and factory functions do not work either
    since extensions are identified by import name.  Configuration
    therefore belongs on the environment.  As the environment then acts as
    central configuration storage, pick attribute names specific enough
    not to clash: ``fragment_cache_prefix`` is a good name, a bare
    ``prefix`` is a terrible one.
    """
    #: tags this extension listens to while parsing, if any
    tags = set()

    #: the priority of that extension.  This is especially useful for
    #: extensions that preprocess values.  A lower value means higher
    #: priority.
    #:
    #: .. versionadded:: 2.4
    priority = 100

    def __init__(self, environment):
        self.environment = environment

    def bind(self, environment):
        """Create a copy of this extension bound to another environment."""
        bound = object.__new__(self.__class__)
        bound.__dict__.update(self.__dict__)
        bound.environment = environment
        return bound

    def preprocess(self, source, name, filename=None):
        """Hook invoked before lexing; may transform the template source
        and must return the (possibly modified) source.  `filename` is
        optional.
        """
        return source

    def filter_stream(self, stream):
        """Receives a :class:`~jinja2.lexer.TokenStream` and must return an
        iterable of :class:`~jinja2.lexer.Token`\\s — not necessarily a
        :class:`~jinja2.lexer.TokenStream`.  See `inlinegettext.py` in the
        `ext` folder of the Jinja2 source distribution for an example
        filter built on this hook.
        """
        return stream

    def parse(self, parser):
        """Called when one of :attr:`tags` matched; the token the parser
        stream points at is the name token that matched.  Must return one
        node or a list of nodes.
        """
        raise NotImplementedError()

    def attr(self, name, lineno=None):
        """Return an attribute node for the current extension.  Useful for
        passing constants on extensions to generated template code::

            self.attr('_my_attribute', lineno=lineno)
        """
        return nodes.ExtensionAttribute(self.identifier, name, lineno=lineno)

    def call_method(self, name, args=None, kwargs=None, dyn_args=None,
                    dyn_kwargs=None, lineno=None):
        """Call a method of the extension; shortcut for :meth:`attr`
        combined with :class:`jinja2.nodes.Call`.
        """
        if args is None:
            args = []
        if kwargs is None:
            kwargs = []
        method_attr = self.attr(name, lineno=lineno)
        return nodes.Call(method_attr, args, kwargs,
                          dyn_args, dyn_kwargs, lineno=lineno)
@contextfunction
def _gettext_alias(__context, *args, **kwargs):
    # Delegate to whatever ``gettext`` resolves to in the active context.
    gettext_callable = __context.resolve('gettext')
    return __context.call(gettext_callable, *args, **kwargs)
def _make_new_gettext(func):
    """Wrap a gettext callable for newstyle use: the translated string is
    expanded with ``%(name)s`` keyword variables and wrapped in
    :class:`Markup` when the eval context has autoescaping enabled.
    """
    @contextfunction
    def gettext(__context, __string, **variables):
        translated = __context.call(func, __string)
        if __context.eval_ctx.autoescape:
            translated = Markup(translated)
        return translated % variables
    return gettext
def _make_new_ngettext(func):
    """Wrap an ngettext callable for newstyle use.  ``num`` defaults to the
    plural count, the result is expanded with keyword variables and marked
    safe under autoescaping.
    """
    @contextfunction
    def ngettext(__context, __singular, __plural, __num, **variables):
        variables.setdefault('num', __num)
        translated = __context.call(func, __singular, __plural, __num)
        if __context.eval_ctx.autoescape:
            translated = Markup(translated)
        return translated % variables
    return ngettext
class InternationalizationExtension(Extension):
    """This extension adds gettext support to Jinja2: the ``{% trans %}``
    tag plus install/uninstall helpers for the ``gettext``/``ngettext``
    environment globals.
    """
    tags = set(['trans'])
    # TODO: the i18n extension is currently reevaluating values in a few
    # situations.  Take this example:
    #   {% trans count=something() %}{{ count }} foo{% pluralize
    #   %}{{ count }} fooss{% endtrans %}
    # something is called twice here: once for the gettext value and
    # once for the n-parameter of the ngettext function.
    def __init__(self, environment):
        # Register the ``_`` alias and expose the install/uninstall/extract
        # helpers on the environment so applications can configure
        # translations.
        Extension.__init__(self, environment)
        environment.globals['_'] = _gettext_alias
        environment.extend(
            install_gettext_translations=self._install,
            install_null_translations=self._install_null,
            install_gettext_callables=self._install_callables,
            uninstall_gettext_translations=self._uninstall,
            extract_translations=self._extract,
            newstyle_gettext=False
        )
    def _install(self, translations, newstyle=None):
        # Prefer the unicode-returning ugettext/ungettext pair (Python 2
        # translations objects) and fall back to gettext/ngettext.
        gettext = getattr(translations, 'ugettext', None)
        if gettext is None:
            gettext = translations.gettext
        ngettext = getattr(translations, 'ungettext', None)
        if ngettext is None:
            ngettext = translations.ngettext
        self._install_callables(gettext, ngettext, newstyle)
    def _install_null(self, newstyle=None):
        # Identity translations: singular passes through unchanged, the
        # plural callable selects singular or plural form based on ``n``.
        self._install_callables(
            lambda x: x,
            lambda s, p, n: (n != 1 and (p,) or (s,))[0],
            newstyle
        )
    def _install_callables(self, gettext, ngettext, newstyle=None):
        # Store the callables as environment globals, wrapping them for
        # newstyle gettext (variable expansion + autoescape) when enabled.
        if newstyle is not None:
            self.environment.newstyle_gettext = newstyle
        if self.environment.newstyle_gettext:
            gettext = _make_new_gettext(gettext)
            ngettext = _make_new_ngettext(ngettext)
        self.environment.globals.update(
            gettext=gettext,
            ngettext=ngettext
        )
    def _uninstall(self, translations):
        # Remove the installed callables again; missing keys are ignored.
        # NOTE(review): the ``translations`` argument is unused here.
        for key in 'gettext', 'ngettext':
            self.environment.globals.pop(key, None)
    def _extract(self, source, gettext_functions=GETTEXT_FUNCTIONS):
        # Accept either raw template source or an already-parsed AST.
        if isinstance(source, string_types):
            source = self.environment.parse(source)
        return extract_from_ast(source, gettext_functions)
    def parse(self, parser):
        """Parse a translatable tag."""
        lineno = next(parser.stream).lineno
        num_called_num = False
        # find all the variables referenced.  Additionally a variable can be
        # defined in the body of the trans block too, but this is checked at
        # a later state.
        plural_expr = None
        plural_expr_assignment = None
        variables = {}
        trimmed = None
        while parser.stream.current.type != 'block_end':
            if variables:
                parser.stream.expect('comma')
            # skip colon for python compatibility
            if parser.stream.skip_if('colon'):
                break
            name = parser.stream.expect('name')
            if name.value in variables:
                parser.fail('translatable variable %r defined twice.' %
                            name.value, name.lineno,
                            exc=TemplateAssertionError)
            # expressions
            if parser.stream.current.type == 'assign':
                next(parser.stream)
                variables[name.value] = var = parser.parse_expression()
            elif trimmed is None and name.value in ('trimmed', 'notrimmed'):
                trimmed = name.value == 'trimmed'
                continue
            else:
                variables[name.value] = var = nodes.Name(name.value, 'load')
            if plural_expr is None:
                # a call expression is hoisted into a ``_trans`` assignment
                # so it is only evaluated once for the plural expression
                if isinstance(var, nodes.Call):
                    plural_expr = nodes.Name('_trans', 'load')
                    variables[name.value] = plural_expr
                    plural_expr_assignment = nodes.Assign(
                        nodes.Name('_trans', 'store'), var)
                else:
                    plural_expr = var
                num_called_num = name.value == 'num'
        parser.stream.expect('block_end')
        plural = None
        have_plural = False
        referenced = set()
        # now parse until endtrans or pluralize
        singular_names, singular = self._parse_block(parser, True)
        if singular_names:
            referenced.update(singular_names)
            if plural_expr is None:
                plural_expr = nodes.Name(singular_names[0], 'load')
                num_called_num = singular_names[0] == 'num'
        # if we have a pluralize block, we parse that too
        if parser.stream.current.test('name:pluralize'):
            have_plural = True
            next(parser.stream)
            if parser.stream.current.type != 'block_end':
                name = parser.stream.expect('name')
                if name.value not in variables:
                    parser.fail('unknown variable %r for pluralization' %
                                name.value, name.lineno,
                                exc=TemplateAssertionError)
                plural_expr = variables[name.value]
                num_called_num = name.value == 'num'
            parser.stream.expect('block_end')
            plural_names, plural = self._parse_block(parser, False)
            next(parser.stream)
            referenced.update(plural_names)
        else:
            next(parser.stream)
        # register free names as simple name expressions
        for var in referenced:
            if var not in variables:
                variables[var] = nodes.Name(var, 'load')
        if not have_plural:
            plural_expr = None
        elif plural_expr is None:
            parser.fail('pluralize without variables', lineno)
        if trimmed is None:
            trimmed = self.environment.policies['ext.i18n.trimmed']
        if trimmed:
            singular = self._trim_whitespace(singular)
            if plural:
                plural = self._trim_whitespace(plural)
        node = self._make_node(singular, plural, variables, plural_expr,
                               bool(referenced),
                               num_called_num and have_plural)
        node.set_lineno(lineno)
        if plural_expr_assignment is not None:
            return [plural_expr_assignment, node]
        else:
            return node
    def _trim_whitespace(self, string, _ws_re=re.compile(r'\s*\n\s*')):
        # Collapse every newline-adjacent whitespace run to a single space.
        return _ws_re.sub(' ', string.strip())
    def _parse_block(self, parser, allow_pluralize):
        """Parse until the next block tag with a given name."""
        referenced = []
        buf = []
        while 1:
            if parser.stream.current.type == 'data':
                # escape literal percent signs for later %-formatting
                buf.append(parser.stream.current.value.replace('%', '%%'))
                next(parser.stream)
            elif parser.stream.current.type == 'variable_begin':
                next(parser.stream)
                name = parser.stream.expect('name').value
                referenced.append(name)
                buf.append('%%(%s)s' % name)
                parser.stream.expect('variable_end')
            elif parser.stream.current.type == 'block_begin':
                next(parser.stream)
                if parser.stream.current.test('name:endtrans'):
                    break
                elif parser.stream.current.test('name:pluralize'):
                    if allow_pluralize:
                        break
                    parser.fail('a translatable section can have only one '
                                'pluralize section')
                parser.fail('control structures in translatable sections are '
                            'not allowed')
            elif parser.stream.eos:
                parser.fail('unclosed translation block')
            else:
                assert False, 'internal parser error'
        return referenced, concat(buf)
    def _make_node(self, singular, plural, variables, plural_expr,
                   vars_referenced, num_called_num):
        """Generates a useful node from the data provided."""
        # no variables referenced?  no need to escape for old style
        # gettext invocations only if there are vars.
        if not vars_referenced and not self.environment.newstyle_gettext:
            singular = singular.replace('%%', '%')
            if plural:
                plural = plural.replace('%%', '%')
        # singular only:
        if plural_expr is None:
            gettext = nodes.Name('gettext', 'load')
            node = nodes.Call(gettext, [nodes.Const(singular)],
                              [], None, None)
        # singular and plural
        else:
            ngettext = nodes.Name('ngettext', 'load')
            node = nodes.Call(ngettext, [
                nodes.Const(singular),
                nodes.Const(plural),
                plural_expr
            ], [], None, None)
        # in case newstyle gettext is used, the method is powerful
        # enough to handle the variable expansion and autoescape
        # handling itself
        if self.environment.newstyle_gettext:
            for key, value in iteritems(variables):
                # the function adds that later anyways in case num was
                # called num, so just skip it.
                if num_called_num and key == 'num':
                    continue
                node.kwargs.append(nodes.Keyword(key, value))
        # otherwise do that here
        else:
            # mark the return value as safe if we are in an
            # environment with autoescaping turned on
            node = nodes.MarkSafeIfAutoescape(node)
            if variables:
                node = nodes.Mod(node, nodes.Dict([
                    nodes.Pair(nodes.Const(key), value)
                    for key, value in variables.items()
                ]))
        return nodes.Output([node])
class ExprStmtExtension(Extension):
    """Provides a ``do`` tag that evaluates an expression like the print
    statement would, but discards the return value.
    """
    tags = set(['do'])

    def parse(self, parser):
        lineno = next(parser.stream).lineno
        stmt = nodes.ExprStmt(lineno=lineno)
        stmt.node = parser.parse_tuple()
        return stmt
class LoopControlExtension(Extension):
    """Makes ``break`` and ``continue`` available inside template loops."""
    tags = set(['break', 'continue'])

    def parse(self, parser):
        token = next(parser.stream)
        node_cls = nodes.Break if token.value == 'break' else nodes.Continue
        return node_cls(lineno=token.lineno)
class WithExtension(Extension):
    # Intentionally empty: no tags, no parse hook.  NOTE(review): presumably
    # kept only so 'jinja2.ext.with_' remains a valid extension import name
    # -- confirm against the environment/parser handling of 'with'.
    pass
class AutoEscapeExtension(Extension):
    # Intentionally empty: no tags, no parse hook.  NOTE(review): presumably
    # kept only so 'jinja2.ext.autoescape' remains a valid extension import
    # name -- confirm against the environment/parser handling of autoescape.
    pass
def extract_from_ast(node, gettext_functions=GETTEXT_FUNCTIONS,
                     babel_style=True):
    """Extract localizable strings from the given template node.

    By default matches are returned in babel style: non-string parameters
    as well as keyword arguments become `None`, which lets Babel figure
    out what you really meant when using gettext functions that allow
    keyword arguments for placeholder expansion.  With
    ``babel_style=False`` only strings are returned and parameters are
    always stored in tuples; invalid gettext calls (calls without a single
    string parameter, or with string parameters after non-string ones) are
    then skipped.

    This example explains the behavior:

    >>> from jinja2 import Environment
    >>> env = Environment()
    >>> node = env.parse('{{ (_("foo"), _(), ngettext("foo", "bar", 42)) }}')
    >>> list(extract_from_ast(node))
    [(1, '_', 'foo'), (1, '_', ()), (1, 'ngettext', ('foo', 'bar', None))]
    >>> list(extract_from_ast(node, babel_style=False))
    [(1, '_', ('foo',)), (1, 'ngettext', ('foo', 'bar'))]

    For every string found this function yields a ``(lineno, function,
    message)`` tuple, where:

    * ``lineno`` is the number of the line on which the string was found,
    * ``function`` is the name of the ``gettext`` function used (if the
      string was extracted from embedded Python code), and
    * ``message`` is the string itself (a ``unicode`` object, or a tuple
      of ``unicode`` objects for functions with multiple string arguments).

    Operating on the AST, this function cannot extract any comments; use
    the babel extraction interface (or extract comments yourself) for
    comment support.
    """
    for call in node.find_all(nodes.Call):
        callee = call.node
        if not isinstance(callee, nodes.Name) or \
           callee.name not in gettext_functions:
            continue
        # positional constants become strings, everything else None
        strings = []
        for arg in call.args:
            if isinstance(arg, nodes.Const) and \
               isinstance(arg.value, string_types):
                strings.append(arg.value)
            else:
                strings.append(None)
        # keyword / dynamic arguments are never extractable
        strings.extend(None for _ in call.kwargs)
        if call.dyn_args is not None:
            strings.append(None)
        if call.dyn_kwargs is not None:
            strings.append(None)
        if not babel_style:
            strings = tuple(x for x in strings if x is not None)
            if not strings:
                continue
        elif len(strings) == 1:
            strings = strings[0]
        else:
            strings = tuple(strings)
        yield call.lineno, callee.name, strings
class _CommentFinder(object): | |
"""Helper class to find comments in a token stream. Can only | |
find comments for gettext calls forwards. Once the comment | |
from line 4 is found, a comment for line 1 will not return a | |
usable value. | |
""" | |
def __init__(self, tokens, comment_tags): | |
self.tokens = tokens | |
self.comment_tags = comment_tags | |
self.offset = 0 | |
self.last_lineno = 0 | |
def find_backwards(self, offset): | |
try: | |
for _, token_type, token_value in \ | |
reversed(self.tokens[self.offset:offset]): | |
if token_type in ('comment', 'linecomment'): | |
try: | |
prefix, comment = token_value.split(None, 1) | |
except ValueError: | |
continue | |
if prefix in self.comment_tags: | |
return [comment.rstrip()] | |
return [] | |
finally: | |
self.offset = offset | |
def find_comments(self, lineno): | |
if not self.comment_tags or self.last_lineno > lineno: | |
return [] | |
for idx, (token_lineno, _, _) in enumerate(self.tokens[self.offset:]): | |
if token_lineno > lineno: | |
return self.find_backwards(self.offset + idx) | |
return self.find_backwards(len(self.tokens)) | |
def babel_extract(fileobj, keywords, comment_tags, options):
    """Babel extraction method for Jinja templates.

    .. versionchanged:: 2.3
       Basic support for translation comments was added.  If `comment_tags`
       is set to a list of keywords for extraction, the extractor tries to
       find the best preceeding comment that begins with one of the
       keywords.  For best results, make sure to not have more than one
       gettext call in one line of code and the matching comment in the
       same line or the line before.

    .. versionchanged:: 2.5.1
       The `newstyle_gettext` flag can be set to `True` to enable newstyle
       gettext calls.

    .. versionchanged:: 2.7
       A `silent` option can now be provided.  If set to `False` template
       syntax errors are propagated instead of being ignored.

    :param fileobj: the file-like object the messages should be extracted
                    from
    :param keywords: a list of keywords (i.e. function names) that should
                     be recognized as translation functions
    :param comment_tags: a list of translator tags to search for and
                         include in the results.
    :param options: a dictionary of additional options (optional)
    :return: an iterator over ``(lineno, funcname, message, comments)``
             tuples.  (comments will be empty currently)
    """
    # collect the extension classes named in the options and make sure the
    # i18n extension is always among them
    extensions = set()
    for extension in options.get('extensions', '').split(','):
        extension = extension.strip()
        if extension:
            extensions.add(import_string(extension))
    extensions.add(InternationalizationExtension)

    def getbool(options, key, default=False):
        # options are strings; interpret the usual truthy spellings
        raw = options.get(key, str(default))
        return raw.lower() in ('1', 'on', 'yes', 'true')

    silent = getbool(options, 'silent', True)
    environment = Environment(
        options.get('block_start_string', BLOCK_START_STRING),
        options.get('block_end_string', BLOCK_END_STRING),
        options.get('variable_start_string', VARIABLE_START_STRING),
        options.get('variable_end_string', VARIABLE_END_STRING),
        options.get('comment_start_string', COMMENT_START_STRING),
        options.get('comment_end_string', COMMENT_END_STRING),
        options.get('line_statement_prefix') or LINE_STATEMENT_PREFIX,
        options.get('line_comment_prefix') or LINE_COMMENT_PREFIX,
        getbool(options, 'trim_blocks', TRIM_BLOCKS),
        getbool(options, 'lstrip_blocks', LSTRIP_BLOCKS),
        NEWLINE_SEQUENCE,
        getbool(options, 'keep_trailing_newline', KEEP_TRAILING_NEWLINE),
        frozenset(extensions),
        cache_size=0,
        auto_reload=False
    )
    if getbool(options, 'trimmed'):
        environment.policies['ext.i18n.trimmed'] = True
    if getbool(options, 'newstyle_gettext'):
        environment.newstyle_gettext = True

    source = fileobj.read().decode(options.get('encoding', 'utf-8'))
    try:
        node = environment.parse(source)
        tokens = list(environment.lex(environment.preprocess(source)))
    except TemplateSyntaxError:
        if not silent:
            raise
        # skip templates with syntax errors
        return

    finder = _CommentFinder(tokens, comment_tags)
    for lineno, func, message in extract_from_ast(node, keywords):
        yield lineno, func, message, finder.find_comments(lineno)
#: nicer import names
# Short aliases so the extensions can be referenced by their conventional
# names (e.g. 'jinja2.ext.i18n') in an Environment's extension list.
i18n = InternationalizationExtension
do = ExprStmtExtension
loopcontrols = LoopControlExtension
with_ = WithExtension
autoescape = AutoEscapeExtension
# -*- coding: utf-8 -*- | |
""" | |
jinja2.filters | |
~~~~~~~~~~~~~~ | |
Bundled jinja filters. | |
:copyright: (c) 2017 by the Jinja Team. | |
:license: BSD, see LICENSE for more details. | |
""" | |
import re | |
import math | |
import random | |
import warnings | |
from itertools import groupby, chain | |
from collections import namedtuple | |
from jinja2.utils import Markup, escape, pformat, urlize, soft_unicode, \ | |
unicode_urlencode, htmlsafe_json_dumps | |
from jinja2.runtime import Undefined | |
from jinja2.exceptions import FilterArgumentError | |
from jinja2._compat import imap, string_types, text_type, iteritems, PY2 | |
# matches one run of word characters (unicode-aware)
_word_re = re.compile(r'\w+', re.UNICODE)
# splits on runs of whitespace/hyphens/opening brackets that begin a new
# word; used by do_title (the capturing group keeps the separators)
_word_beginning_split_re = re.compile(r'([-\s\(\{\[\<]+)', re.UNICODE)
def contextfilter(f):
    """Decorator marking a filter as context dependent.  The current
    :class:`Context` is passed to it as first argument.
    """
    setattr(f, 'contextfilter', True)
    return f
def evalcontextfilter(f):
    """Decorator marking a filter as eval-context dependent.  An eval
    context object is passed as first argument; see :ref:`eval-context`
    for more information.

    .. versionadded:: 2.4
    """
    setattr(f, 'evalcontextfilter', True)
    return f
def environmentfilter(f):
    """Decorator marking a filter as environment dependent.  The current
    :class:`Environment` is passed to the filter as first argument.
    """
    setattr(f, 'environmentfilter', True)
    return f
def ignore_case(value):
    """Postprocessor for :func:`make_attrgetter`: lowercases strings and
    passes every other type through untouched.
    """
    if isinstance(value, string_types):
        return value.lower()
    return value
def make_attrgetter(environment, attribute, postprocess=None):
    """Build a callable that resolves `attribute` on a passed object using
    the environment's lookup rules.  Dotted paths descend into attributes
    of attributes; purely numeric path segments are looked up as integers.
    """
    if attribute is None:
        parts = []
    elif isinstance(attribute, string_types):
        parts = [int(x) if x.isdigit() else x
                 for x in attribute.split('.')]
    else:
        parts = [attribute]

    def attrgetter(item):
        for part in parts:
            item = environment.getitem(item, part)
        if postprocess is not None:
            item = postprocess(item)
        return item
    return attrgetter
def do_forceescape(value):
    """Enforce HTML escaping.  Already-safe markup (objects exposing
    ``__html__``) is unwrapped first, so this will probably double escape
    variables.
    """
    if hasattr(value, '__html__'):
        value = value.__html__()
    return escape(text_type(value))
def do_urlencode(value):
    """Escape strings for use in URLs (uses UTF-8 encoding).  Accepts
    dictionaries, plain strings and pairwise iterables.

    .. versionadded:: 2.7
    """
    if isinstance(value, dict):
        pairs = iteritems(value)
    elif isinstance(value, string_types):
        pairs = None
    else:
        try:
            pairs = iter(value)
        except TypeError:
            pairs = None
    # plain strings (or non-iterables) are encoded as a single value
    if pairs is None:
        return unicode_urlencode(value)
    return u'&'.join(unicode_urlencode(k) + '=' +
                     unicode_urlencode(v, for_qs=True)
                     for k, v in pairs)
@evalcontextfilter
def do_replace(eval_ctx, s, old, new, count=None):
    """Return a copy of the value with all occurrences of a substring
    replaced with a new one.  The first argument is the substring
    that should be replaced, the second is the replacement string.
    If the optional third argument ``count`` is given, only the first
    ``count`` occurrences are replaced:

    .. sourcecode:: jinja

        {{ "Hello World"|replace("Hello", "Goodbye") }}
            -> Goodbye World

        {{ "aaaaargh"|replace("a", "d'oh, ", 2) }}
            -> d'oh, d'oh, aaargh

    :param eval_ctx: eval context (injected by ``@evalcontextfilter``)
    :param s: the value to operate on
    :param old: substring to replace
    :param new: replacement string
    :param count: maximum number of replacements; ``None`` means all
    """
    if count is None:
        count = -1
    if not eval_ctx.autoescape:
        return text_type(s).replace(text_type(old), text_type(new), count)
    # BUG FIX: the original condition was
    #     hasattr(old, '__html__') or hasattr(new, '__html__') and \
    #         not hasattr(s, '__html__')
    # where ``and`` binds tighter than ``or``, so ``s`` was escaped whenever
    # ``old`` was markup even if ``s`` itself was already safe markup.  The
    # intended logic (used by later Jinja releases) only escapes ``s`` when
    # it is NOT markup but one of the arguments is:
    if (hasattr(old, '__html__') or hasattr(new, '__html__')) and \
            not hasattr(s, '__html__'):
        s = escape(s)
    else:
        s = soft_unicode(s)
    return s.replace(soft_unicode(old), soft_unicode(new), count)
def do_upper(s):
    """Convert a value to uppercase."""
    text = soft_unicode(s)
    return text.upper()
def do_lower(s):
    """Convert a value to lowercase."""
    text = soft_unicode(s)
    return text.lower()
@evalcontextfilter
def do_xmlattr(_eval_ctx, d, autospace=True):
    """Create an SGML/XML attribute string based on the items in a dict.
    All values that are neither `none` nor `undefined` are automatically
    escaped:

    .. sourcecode:: html+jinja

        <ul{{ {'class': 'my_list', 'missing': none,
                'id': 'list-%d'|format(variable)}|xmlattr }}>
        ...
        </ul>

    Results in something like this:

    .. sourcecode:: html

        <ul class="my_list" id="list-42">
        ...
        </ul>

    As you can see it automatically prepends a space in front of the item
    if the filter returned something unless the second parameter is false.
    """
    fragments = (
        u'%s="%s"' % (escape(key), escape(value))
        for key, value in iteritems(d)
        if value is not None and not isinstance(value, Undefined)
    )
    rv = u' '.join(fragments)
    if autospace and rv:
        rv = u' ' + rv
    if _eval_ctx.autoescape:
        rv = Markup(rv)
    return rv
def do_capitalize(s):
    """Capitalize a value: the first character becomes uppercase, all
    others lowercase.
    """
    text = soft_unicode(s)
    return text.capitalize()
def do_title(s):
    """Return a titlecased version of the value: every word starts with an
    uppercase letter, all remaining characters are lowercase.
    """
    # the split keeps the separator runs, which pass through unchanged
    pieces = _word_beginning_split_re.split(soft_unicode(s))
    return ''.join(item[0].upper() + item[1:].lower()
                   for item in pieces if item)
def do_dictsort(value, case_sensitive=False, by='key', reverse=False):
    """Sort a dict and yield (key, value) pairs.  Because python dicts are
    unsorted you may want to use this function to order them by either
    key or value:

    .. sourcecode:: jinja

        {% for item in mydict|dictsort %}
            sort the dict by key, case insensitive

        {% for item in mydict|dictsort(reverse=true) %}
            sort the dict by key, case insensitive, reverse order

        {% for item in mydict|dictsort(true) %}
            sort the dict by key, case sensitive

        {% for item in mydict|dictsort(false, 'value') %}
            sort the dict by value, case insensitive
    """
    if by == 'key':
        pos = 0
    elif by == 'value':
        pos = 1
    else:
        raise FilterArgumentError(
            'You can only sort by either "key" or "value"'
        )

    def sort_func(item):
        component = item[pos]
        return component if case_sensitive else ignore_case(component)

    return sorted(value.items(), key=sort_func, reverse=reverse)
@environmentfilter
def do_sort(
    environment, value, reverse=False, case_sensitive=False, attribute=None
):
    """Sort an iterable.  Sorts ascending by default; pass ``reverse=true``
    to invert the order.

    If the iterable is made of strings, the `case_sensitive` parameter
    controls case sensitivity of the comparison (disabled by default).

    .. sourcecode:: jinja

        {% for item in iterable|sort %}
            ...
        {% endfor %}

    It is also possible to sort by an attribute (for example to sort
    by the date of an object) by specifying the `attribute` parameter:

    .. sourcecode:: jinja

        {% for item in iterable|sort(attribute='date') %}
            ...
        {% endfor %}

    .. versionchanged:: 2.6
       The `attribute` parameter was added.
    """
    postprocess = None if case_sensitive else ignore_case
    key_func = make_attrgetter(environment, attribute,
                               postprocess=postprocess)
    return sorted(value, key=key_func, reverse=reverse)
@environmentfilter
def do_unique(environment, value, case_sensitive=False, attribute=None):
    """Returns a list of unique items from the given iterable.

    .. sourcecode:: jinja

        {{ ['foo', 'bar', 'foobar', 'FooBar']|unique }}
            -> ['foo', 'bar', 'foobar']

    The unique items are yielded in the same order as their first
    occurrence in the iterable passed to the filter.

    :param case_sensitive: Treat upper and lower case strings as distinct.
    :param attribute: Filter objects with unique values for this attribute.
    """
    getter = make_attrgetter(
        environment, attribute,
        postprocess=None if case_sensitive else ignore_case
    )
    seen = set()
    for item in value:
        marker = getter(item)
        if marker in seen:
            continue
        seen.add(marker)
        yield item
def _min_or_max(environment, value, func, case_sensitive, attribute):
    """Shared implementation behind the ``min`` and ``max`` filters;
    `func` is the builtin :func:`min` or :func:`max`.
    """
    iterator = iter(value)
    try:
        first = next(iterator)
    except StopIteration:
        # empty sequences yield an undefined object rather than raising
        return environment.undefined('No aggregated item, sequence was empty.')
    key_func = make_attrgetter(
        environment, attribute,
        None if case_sensitive else ignore_case
    )
    return func(chain([first], iterator), key=key_func)
@environmentfilter
def do_min(environment, value, case_sensitive=False, attribute=None):
    """Return the smallest item from the sequence.

    .. sourcecode:: jinja

        {{ [1, 2, 3]|min }}
            -> 1

    :param case_sensitive: Treat upper and lower case strings as distinct.
    :param attribute: Get the object with the min value of this attribute.
    """
    return _min_or_max(environment, value, min, case_sensitive, attribute)
@environmentfilter
def do_max(environment, value, case_sensitive=False, attribute=None):
    """Return the largest item from the sequence.

    .. sourcecode:: jinja

        {{ [1, 2, 3]|max }}
            -> 3

    :param case_sensitive: Treat upper and lower case strings as distinct.
    :param attribute: Get the object with the max value of this attribute.
    """
    return _min_or_max(environment, value, max, case_sensitive, attribute)
def do_default(value, default_value=u'', boolean=False):
    """If the value is undefined it will return the passed default value,
    otherwise the value of the variable:

    .. sourcecode:: jinja

        {{ my_variable|default('my_variable is not defined') }}

    This will output the value of ``my_variable`` if the variable was
    defined, otherwise ``'my_variable is not defined'``.  If you want
    to use default with variables that evaluate to false you have to
    set the second parameter to `true`:

    .. sourcecode:: jinja

        {{ ''|default('the string was empty', true) }}
    """
    use_default = isinstance(value, Undefined) or (boolean and not value)
    return default_value if use_default else value
@evalcontextfilter
def do_join(eval_ctx, value, d=u'', attribute=None):
    """Return a string which is the concatenation of the strings in the
    sequence.  The separator between elements is an empty string per
    default, you can define it with the optional parameter:

    .. sourcecode:: jinja

        {{ [1, 2, 3]|join('|') }}
            -> 1|2|3

        {{ [1, 2, 3]|join }}
            -> 123

    It is also possible to join certain attributes of an object:

    .. sourcecode:: jinja

        {{ users|join(', ', attribute='username') }}

    .. versionadded:: 2.6
       The `attribute` parameter was added.
    """
    if attribute is not None:
        value = imap(make_attrgetter(eval_ctx.environment, attribute), value)

    # no automatic escaping?  joining is a lot easier then
    if not eval_ctx.autoescape:
        return text_type(d).join(imap(text_type, value))

    # if the delimiter doesn't have an html representation we check
    # if any of the items has.  If yes we do a coercion to Markup
    if not hasattr(d, '__html__'):
        value = list(value)
        do_escape = False
        for idx, item in enumerate(value):
            if hasattr(item, '__html__'):
                do_escape = True
            else:
                value[idx] = text_type(item)
        if do_escape:
            d = escape(d)
        else:
            d = text_type(d)
        return d.join(value)

    # no html involved, so normal joining
    return soft_unicode(d).join(imap(soft_unicode, value))
def do_center(value, width=80):
    """Centers the value in a field of a given width (default 80)."""
    text = text_type(value)
    return text.center(width)
@environmentfilter
def do_first(environment, seq):
    """Return the first item of a sequence."""
    for item in seq:
        return item
    return environment.undefined('No first item, sequence was empty.')
@environmentfilter
def do_last(environment, seq):
    """Return the last item of a sequence."""
    # ``reversed`` requires a real sequence; a TypeError for other
    # iterables propagates exactly as before
    for item in reversed(seq):
        return item
    return environment.undefined('No last item, sequence was empty.')
@contextfilter
def do_random(context, seq):
    """Return a random item from the sequence."""
    try:
        choice = random.choice(seq)
    except IndexError:
        return context.environment.undefined('No random item, sequence was empty.')
    return choice
def do_filesizeformat(value, binary=False):
    """Format the value like a 'human-readable' file size (i.e. 13 kB,
    4.1 MB, 102 Bytes, etc).  Per default decimal prefixes are used (Mega,
    Giga, etc.), if the second parameter is set to `True` the binary
    prefixes are used (Mebi, Gibi).

    :param value: a number (or numeric string) of bytes.
    :param binary: use base-1024 units (KiB, MiB, ...) instead of base-1000.
    """
    bytes = float(value)
    # conditional expressions instead of the legacy ``x and a or b`` idiom
    base = 1024 if binary else 1000
    prefixes = [
        'KiB' if binary else 'kB',
        'MiB' if binary else 'MB',
        'GiB' if binary else 'GB',
        'TiB' if binary else 'TB',
        'PiB' if binary else 'PB',
        'EiB' if binary else 'EB',
        'ZiB' if binary else 'ZB',
        'YiB' if binary else 'YB',
    ]
    if bytes == 1:
        return '1 Byte'
    elif bytes < base:
        return '%d Bytes' % bytes
    else:
        for i, prefix in enumerate(prefixes):
            unit = base ** (i + 2)
            if bytes < unit:
                return '%.1f %s' % ((base * bytes / unit), prefix)
        # value exceeds the largest prefix: reuse the last unit/prefix
        return '%.1f %s' % ((base * bytes / unit), prefix)
def do_pprint(value, verbose=False):
    """Pretty print a variable.  Useful for debugging.

    With Jinja 1.2 onwards you can pass it a parameter.  If this parameter
    is truthy the output will be more verbose (this requires `pretty`).

    :param value: the object to pretty print.
    :param verbose: request more verbose output.
    """
    return pformat(value, verbose=verbose)
@evalcontextfilter
def do_urlize(eval_ctx, value, trim_url_limit=None, nofollow=False,
              target=None, rel=None):
    """Converts URLs in plain text into clickable links.

    If you pass the filter an additional integer it will shorten the urls
    to that number.  Also a third argument exists that makes the urls
    "nofollow":

    .. sourcecode:: jinja

        {{ mytext|urlize(40, true) }}
            links are shortened to 40 chars and defined with rel="nofollow"

    If *target* is specified, the ``target`` attribute will be added to the
    ``<a>`` tag:

    .. sourcecode:: jinja

       {{ mytext|urlize(40, target='_blank') }}

    .. versionchanged:: 2.8+
       The *target* parameter was added.
    """
    policies = eval_ctx.environment.policies
    # merge caller-supplied rel tokens with nofollow and the policy default
    rel = set((rel or '').split() or [])
    if nofollow:
        rel.add('nofollow')
    rel.update((policies['urlize.rel'] or '').split())
    if target is None:
        target = policies['urlize.target']
    # normalize to a sorted space-separated string, or None when empty
    rel = ' '.join(sorted(rel)) or None
    rv = urlize(value, trim_url_limit, rel=rel, target=target)
    if eval_ctx.autoescape:
        rv = Markup(rv)
    return rv
def do_indent(
    s, width=4, first=False, blank=False, indentfirst=None
):
    """Return a copy of the string with each line indented by 4 spaces. The
    first line and blank lines are not indented by default.

    :param width: Number of spaces to indent by.
    :param first: Don't skip indenting the first line.
    :param blank: Don't skip indenting empty lines.

    .. versionchanged:: 2.10
        Blank lines are not indented by default.

        Rename the ``indentfirst`` argument to ``first``.
    """
    if indentfirst is not None:
        warnings.warn(DeprecationWarning(
            'The "indentfirst" argument is renamed to "first".'
        ), stacklevel=2)
        first = indentfirst

    pad = u' ' * width
    # the trailing newline quirk is required for the splitlines call
    text = s + u'\n'

    if blank:
        out = (u'\n' + pad).join(text.splitlines())
    else:
        all_lines = text.splitlines()
        out = all_lines[0]
        rest = all_lines[1:]
        if rest:
            out += u'\n' + u'\n'.join(
                pad + line if line else line for line in rest
            )

    if first:
        out = pad + out

    return out
@environmentfilter
def do_truncate(env, s, length=255, killwords=False, end='...', leeway=None):
    """Return a truncated copy of the string.  The length is specified
    with the first parameter which defaults to ``255``.  If the second
    parameter is ``true`` the filter will cut the text at length.  Otherwise
    it will discard the last word.  If the text was in fact truncated it
    will append an ellipsis sign (``"..."``).  A different ellipsis can be
    given as the third parameter.  Strings that only exceed the length by
    the tolerance margin given in the fourth parameter will not be
    truncated.

    .. sourcecode:: jinja

        {{ "foo bar baz qux"|truncate(9) }}
            -> "foo..."
        {{ "foo bar baz qux"|truncate(9, True) }}
            -> "foo ba..."
        {{ "foo bar baz qux"|truncate(11) }}
            -> "foo bar baz qux"
        {{ "foo bar baz qux"|truncate(11, False, '...', 0) }}
            -> "foo bar..."

    The default leeway on newer Jinja2 versions is 5 and was 0 before but
    can be reconfigured globally.
    """
    if leeway is None:
        leeway = env.policies['truncate.leeway']
    assert length >= len(end), 'expected length >= %s, got %s' % (len(end), length)
    assert leeway >= 0, 'expected leeway >= 0, got %s' % leeway
    # within the tolerance window: return unchanged
    if len(s) <= length + leeway:
        return s
    cut = s[:length - len(end)]
    if not killwords:
        # drop the trailing partial word
        cut = cut.rsplit(' ', 1)[0]
    return cut + end
@environmentfilter
def do_wordwrap(environment, s, width=79, break_long_words=True,
                wrapstring=None):
    """
    Return a copy of the string passed to the filter wrapped after
    ``79`` characters.  You can override this default using the first
    parameter.  If you set the second parameter to `false` Jinja will not
    split words apart if they are longer than `width`. By default, the
    newlines will be the default newlines for the environment, but this can
    be changed using the wrapstring keyword argument.

    .. versionadded:: 2.7
       Added support for the `wrapstring` parameter.
    """
    import textwrap
    if not wrapstring:
        wrapstring = environment.newline_sequence
    pieces = textwrap.wrap(s, width=width, expand_tabs=False,
                           replace_whitespace=False,
                           break_long_words=break_long_words)
    return wrapstring.join(pieces)
def do_wordcount(s):
    """Count the words in that string."""
    # one match of the module-level word pattern == one word
    return len(_word_re.findall(s))
def do_int(value, default=0, base=10):
    """Convert the value into an integer.  If the conversion doesn't work
    it will return ``0``.  You can override this default using the first
    parameter.  You can also override the default base (10) in the second
    parameter, which handles input with prefixes such as 0b, 0o and 0x for
    bases 2, 8 and 16 respectively.

    The base is ignored for decimal numbers and non-string values.
    """
    try:
        if isinstance(value, string_types):
            return int(value, base)
        return int(value)
    except (TypeError, ValueError):
        pass
    # quirk: "42.23"|int should give 42, so retry through float
    try:
        return int(float(value))
    except (TypeError, ValueError):
        return default
def do_float(value, default=0.0):
    """Convert the value into a floating point number.  If the
    conversion doesn't work it will return ``0.0``.  You can
    override this default using the first parameter.
    """
    try:
        result = float(value)
    except (TypeError, ValueError):
        result = default
    return result
def do_format(value, *args, **kwargs):
    """
    Apply python string formatting on an object:

    .. sourcecode:: jinja

        {{ "%s - %s"|format("Hello?", "Foo!") }}
            -> Hello? - Foo!
    """
    # positional and keyword arguments are mutually exclusive for ``%``
    if args and kwargs:
        raise FilterArgumentError('can\'t handle positional and keyword '
                                  'arguments at the same time')
    params = kwargs or args
    return soft_unicode(value) % params
def do_trim(value):
    """Strip leading and trailing whitespace."""
    # coerce to text first so non-string values are accepted
    return soft_unicode(value).strip()
def do_striptags(value):
    """Strip SGML/XML tags and replace adjacent whitespace by one space.
    """
    if hasattr(value, '__html__'):
        # unwrap markup-aware objects to their HTML text first
        value = value.__html__()
    return Markup(text_type(value)).striptags()
def do_slice(value, slices, fill_with=None):
    """Slice an iterator and return a list of lists containing
    those items.  Useful if you want to create a div containing
    three ul tags that represent columns:

    .. sourcecode:: html+jinja

        <div class="columwrapper">
          {%- for column in items|slice(3) %}
            <ul class="column-{{ loop.index }}">
            {%- for item in column %}
              <li>{{ item }}</li>
            {%- endfor %}
            </ul>
          {%- endfor %}
        </div>

    If you pass it a second argument it's used to fill missing
    values on the last iteration.
    """
    seq = list(value)
    total = len(seq)
    # the first ``extra`` slices receive one additional item each
    per_slice, extra = divmod(total, slices)
    offset = 0
    for n in range(slices):
        start = offset + n * per_slice
        if n < extra:
            offset += 1
        end = offset + (n + 1) * per_slice
        chunk = seq[start:end]
        if fill_with is not None and n >= extra:
            chunk.append(fill_with)
        yield chunk
def do_batch(value, linecount, fill_with=None):
    """
    A filter that batches items.  It works pretty much like `slice`
    just the other way round.  It returns a list of lists with the
    given number of items.  If you provide a second parameter this
    is used to fill up missing items.  See this example:

    .. sourcecode:: html+jinja

        <table>
        {%- for row in items|batch(3, '&nbsp;') %}
          <tr>
          {%- for column in row %}
            <td>{{ column }}</td>
          {%- endfor %}
          </tr>
        {%- endfor %}
        </table>
    """
    current = []
    for item in value:
        if len(current) == linecount:
            yield current
            current = []
        current.append(item)
    # flush the (possibly short) final batch
    if current:
        missing = linecount - len(current)
        if fill_with is not None and missing > 0:
            current.extend([fill_with] * missing)
        yield current
def do_round(value, precision=0, method='common'):
    """Round the number to a given precision.  The first
    parameter specifies the precision (default is ``0``), the
    second the rounding method:

    - ``'common'`` rounds either up or down
    - ``'ceil'`` always rounds up
    - ``'floor'`` always rounds down

    If you don't specify a method ``'common'`` is used.

    .. sourcecode:: jinja

        {{ 42.55|round }}
            -> 43.0
        {{ 42.55|round(1, 'floor') }}
            -> 42.5

    Note that even if rounded to 0 precision, a float is returned.  If
    you need a real integer, pipe it through `int`:

    .. sourcecode:: jinja

        {{ 42.55|round|int }}
            -> 43
    """
    # idiomatic membership test (was ``not method in ...``)
    if method not in ('common', 'ceil', 'floor'):
        raise FilterArgumentError('method must be common, ceil or floor')
    if method == 'common':
        return round(value, precision)
    # scale, apply math.ceil/math.floor, then scale back
    func = getattr(math, method)
    return func(value * (10 ** precision)) / (10 ** precision)
# Use a regular tuple repr here.  This is what we did in the past and we
# really want to hide this custom type as much as possible.  In particular
# we do not want to accidentally expose an auto generated repr in case
# people start to print this out in comments or something similar for
# debugging.
# ``grouper`` holds the shared key, ``list`` the items that share it;
# produced by ``do_groupby`` below.
_GroupTuple = namedtuple('_GroupTuple', ['grouper', 'list'])
_GroupTuple.__repr__ = tuple.__repr__
_GroupTuple.__str__ = tuple.__str__
@environmentfilter
def do_groupby(environment, value, attribute):
    """Group a sequence of objects by a common attribute.

    If you for example have a list of dicts or objects that represent persons
    with `gender`, `first_name` and `last_name` attributes and you want to
    group all users by genders you can do something like the following
    snippet:

    .. sourcecode:: html+jinja

        <ul>
        {% for group in persons|groupby('gender') %}
            <li>{{ group.grouper }}<ul>
            {% for person in group.list %}
                <li>{{ person.first_name }} {{ person.last_name }}</li>
            {% endfor %}</ul></li>
        {% endfor %}
        </ul>

    Additionally it's possible to use tuple unpacking for the grouper and
    list:

    .. sourcecode:: html+jinja

        <ul>
        {% for grouper, list in persons|groupby('gender') %}
            ...
        {% endfor %}
        </ul>

    As you can see the item we're grouping by is stored in the `grouper`
    attribute and the `list` contains all the objects that have this grouper
    in common.

    .. versionchanged:: 2.6
       It's now possible to use dotted notation to group by the child
       attribute of another attribute.
    """
    expr = make_attrgetter(environment, attribute)
    # sort first so itertools.groupby sees equal keys adjacently
    return [_GroupTuple(key, list(values)) for key, values
            in groupby(sorted(value, key=expr), expr)]
@environmentfilter
def do_sum(environment, iterable, attribute=None, start=0):
    """Returns the sum of a sequence of numbers plus the value of parameter
    'start' (which defaults to 0).  When the sequence is empty it returns
    start.

    It is also possible to sum up only certain attributes:

    .. sourcecode:: jinja

        Total: {{ items|sum(attribute='price') }}

    .. versionchanged:: 2.6
       The `attribute` parameter was added to allow summing up over
       attributes.  Also the `start` parameter was moved on to the right.
    """
    if attribute is not None:
        iterable = imap(make_attrgetter(environment, attribute), iterable)
    return sum(iterable, start)
def do_list(value):
    """Convert the value into a list.  If it was a string the returned list
    will be a list of characters.
    """
    return [item for item in value]
def do_mark_safe(value):
    """Mark the value as safe which means that in an environment with
    automatic escaping enabled this variable will not be escaped.
    """
    return Markup(value)
def do_mark_unsafe(value):
    """Mark a value as unsafe.  This is the reverse operation for
    :func:`safe`.
    """
    # plain text coercion drops any Markup wrapper
    return text_type(value)
def do_reverse(value):
    """Reverse the object or return an iterator that iterates over it the
    other way round.
    """
    if isinstance(value, string_types):
        return value[::-1]
    try:
        return reversed(value)
    except TypeError:
        pass
    # not a sequence: materialize and reverse in place
    try:
        rv = list(value)
    except TypeError:
        raise FilterArgumentError('argument must be iterable')
    rv.reverse()
    return rv
@environmentfilter
def do_attr(environment, obj, name):
    """Get an attribute of an object.  ``foo|attr("bar")`` works like
    ``foo.bar`` just that always an attribute is returned and items are not
    looked up.

    See :ref:`Notes on subscriptions <notes-on-subscriptions>` for more details.
    """
    try:
        name = str(name)
    except UnicodeError:
        # non-ASCII name on Python 2 cannot be a native attribute name;
        # fall through to the undefined result below
        pass
    else:
        try:
            value = getattr(obj, name)
        except AttributeError:
            pass
        else:
            # honour the sandbox: unsafe attributes yield a special
            # undefined object instead of leaking the value
            if environment.sandboxed and not \
               environment.is_safe_attribute(obj, name, value):
                return environment.unsafe_undefined(obj, name)
            return value
    return environment.undefined(obj=obj, name=name)
@contextfilter
def do_map(*args, **kwargs):
    """Applies a filter on a sequence of objects or looks up an attribute.
    This is useful when dealing with lists of objects but you are really
    only interested in a certain value of it.

    The basic usage is mapping on an attribute.  Imagine you have a list
    of users but you are only interested in a list of usernames:

    .. sourcecode:: jinja

        Users on this page: {{ users|map(attribute='username')|join(', ') }}

    Alternatively you can let it invoke a filter by passing the name of the
    filter and the arguments afterwards.  A good example would be applying a
    text conversion filter on a sequence:

    .. sourcecode:: jinja

        Users on this page: {{ titles|map('lower')|join(', ') }}

    .. versionadded:: 2.7
    """
    seq, func = prepare_map(args, kwargs)
    if not seq:
        return
    for item in seq:
        yield func(item)
@contextfilter
def do_select(*args, **kwargs):
    """Filters a sequence of objects by applying a test to each object,
    and only selecting the objects with the test succeeding.

    If no test is specified, each object will be evaluated as a boolean.

    Example usage:

    .. sourcecode:: jinja

        {{ numbers|select("odd") }}
        {{ numbers|select("divisibleby", 3) }}
        {{ numbers|select("lessthan", 42) }}
        {{ strings|select("equalto", "mystring") }}

    .. versionadded:: 2.7
    """
    return select_or_reject(args, kwargs, lambda x: x, False)
@contextfilter
def do_reject(*args, **kwargs):
    """Filters a sequence of objects by applying a test to each object,
    and rejecting the objects with the test succeeding.

    If no test is specified, each object will be evaluated as a boolean.

    Example usage:

    .. sourcecode:: jinja

        {{ numbers|reject("odd") }}

    .. versionadded:: 2.7
    """
    return select_or_reject(args, kwargs, lambda x: not x, False)
@contextfilter
def do_selectattr(*args, **kwargs):
    """Filters a sequence of objects by applying a test to the specified
    attribute of each object, and only selecting the objects with the
    test succeeding.

    If no test is specified, the attribute's value will be evaluated as
    a boolean.

    Example usage:

    .. sourcecode:: jinja

        {{ users|selectattr("is_active") }}
        {{ users|selectattr("email", "none") }}

    .. versionadded:: 2.7
    """
    return select_or_reject(args, kwargs, lambda x: x, True)
@contextfilter
def do_rejectattr(*args, **kwargs):
    """Filters a sequence of objects by applying a test to the specified
    attribute of each object, and rejecting the objects with the test
    succeeding.

    If no test is specified, the attribute's value will be evaluated as
    a boolean.

    Example usage:

    .. sourcecode:: jinja

        {{ users|rejectattr("is_active") }}
        {{ users|rejectattr("email", "none") }}

    .. versionadded:: 2.7
    """
    return select_or_reject(args, kwargs, lambda x: not x, True)
@evalcontextfilter
def do_tojson(eval_ctx, value, indent=None):
    """Dumps a structure to JSON so that it's safe to use in ``<script>``
    tags.  It accepts the same arguments and returns a JSON string.  Note
    that this is available in templates through the ``|tojson`` filter which
    will also mark the result as safe.  Due to how this function escapes
    certain characters this is safe even if used outside of ``<script>``
    tags.

    The following characters are escaped in strings:

    -   ``<``
    -   ``>``
    -   ``&``
    -   ``'``

    This makes it safe to embed such strings in any place in HTML with the
    notable exception of double quoted attributes.  In that case single
    quote your attributes or HTML escape it in addition.

    The indent parameter can be used to enable pretty printing.  Set it to
    the number of spaces that the structures should be indented with.

    Note that this filter is for use in HTML contexts only.

    .. versionadded:: 2.9
    """
    policies = eval_ctx.environment.policies
    dumper = policies['json.dumps_function']
    options = policies['json.dumps_kwargs']
    if indent is not None:
        # copy before mutating so the shared policy dict stays untouched
        options = dict(options, indent=indent)
    return htmlsafe_json_dumps(value, dumper=dumper, **options)
def prepare_map(args, kwargs):
    """Split the ``map`` filter arguments into the input sequence and the
    per-item callable (attribute getter or filter invocation).
    """
    context, seq = args[0], args[1]

    if len(args) == 2 and 'attribute' in kwargs:
        # attribute mode: map(attribute='name')
        attribute = kwargs.pop('attribute')
        if kwargs:
            raise FilterArgumentError('Unexpected keyword argument %r' %
                                      next(iter(kwargs)))
        return seq, make_attrgetter(context.environment, attribute)

    # filter mode: map('filtername', *args, **kwargs)
    try:
        name = args[2]
        args = args[3:]
    except LookupError:
        raise FilterArgumentError('map requires a filter argument')
    func = lambda item: context.environment.call_filter(
        name, item, args, kwargs, context=context)
    return seq, func
def prepare_select_or_reject(args, kwargs, modfunc, lookup_attr):
    """Split select/reject(attr) arguments into the input sequence and a
    predicate.  ``modfunc`` inverts the result for the reject variants and
    ``lookup_attr`` switches on the ``*attr`` attribute-lookup mode.
    """
    context, seq = args[0], args[1]

    offset = 0
    transform = lambda x: x
    if lookup_attr:
        try:
            attr = args[2]
        except LookupError:
            raise FilterArgumentError('Missing parameter for attribute name')
        transform = make_attrgetter(context.environment, attr)
        offset = 1

    try:
        name = args[2 + offset]
        args = args[3 + offset:]
    except LookupError:
        # no test given: truthiness of the (transformed) item decides
        check = bool
    else:
        check = lambda item: context.environment.call_test(
            name, item, args, kwargs)

    return seq, lambda item: modfunc(check(transform(item)))
def select_or_reject(args, kwargs, modfunc, lookup_attr):
    """Yield the items of the sequence for which the prepared predicate
    holds (shared driver for select/reject/selectattr/rejectattr).
    """
    seq, func = prepare_select_or_reject(args, kwargs, modfunc, lookup_attr)
    if not seq:
        return
    for item in seq:
        if func(item):
            yield item
# default mapping of filter names to implementations; an environment
# installs this as its built-in ``filters`` table
FILTERS = {
    'abs':                  abs,
    'attr':                 do_attr,
    'batch':                do_batch,
    'capitalize':           do_capitalize,
    'center':               do_center,
    'count':                len,
    'd':                    do_default,
    'default':              do_default,
    'dictsort':             do_dictsort,
    'e':                    escape,
    'escape':               escape,
    'filesizeformat':       do_filesizeformat,
    'first':                do_first,
    'float':                do_float,
    'forceescape':          do_forceescape,
    'format':               do_format,
    'groupby':              do_groupby,
    'indent':               do_indent,
    'int':                  do_int,
    'join':                 do_join,
    'last':                 do_last,
    'length':               len,
    'list':                 do_list,
    'lower':                do_lower,
    'map':                  do_map,
    'min':                  do_min,
    'max':                  do_max,
    'pprint':               do_pprint,
    'random':               do_random,
    'reject':               do_reject,
    'rejectattr':           do_rejectattr,
    'replace':              do_replace,
    'reverse':              do_reverse,
    'round':                do_round,
    'safe':                 do_mark_safe,
    'select':               do_select,
    'selectattr':           do_selectattr,
    'slice':                do_slice,
    'sort':                 do_sort,
    'string':               soft_unicode,
    'striptags':            do_striptags,
    'sum':                  do_sum,
    'title':                do_title,
    'trim':                 do_trim,
    'truncate':             do_truncate,
    'unique':               do_unique,
    'upper':                do_upper,
    'urlencode':            do_urlencode,
    'urlize':               do_urlize,
    'wordcount':            do_wordcount,
    'wordwrap':             do_wordwrap,
    'xmlattr':              do_xmlattr,
    'tojson':               do_tojson,
}
from jinja2.visitor import NodeVisitor | |
from jinja2._compat import iteritems | |
# load-instruction markers stored in ``Symbols.loads`` (see the Symbols
# class below for how each one is assigned)
VAR_LOAD_PARAMETER = 'param'      # the name is bound as a parameter
VAR_LOAD_RESOLVE = 'resolve'      # the name must be resolved at runtime
VAR_LOAD_ALIAS = 'alias'          # the name aliases an outer-scope reference
VAR_LOAD_UNDEFINED = 'undefined'  # the name starts out undefined
def find_symbols(nodes, parent_symbols=None):
    """Analyze a sequence of nodes and return the resulting symbol table."""
    symbols = Symbols(parent=parent_symbols)
    walker = FrameSymbolVisitor(symbols)
    for node in nodes:
        walker.visit(node)
    return symbols
def symbols_for_node(node, parent_symbols=None):
    """Analyze a single node and return its symbol table."""
    symbols = Symbols(parent=parent_symbols)
    symbols.analyze_node(node)
    return symbols
class Symbols(object):
    """Tracks name references, loads and stores for one frame (scope
    level) of a template, chained to the symbol table of the parent scope.
    """

    def __init__(self, parent=None, level=None):
        # the nesting level is normally derived from the parent chain but
        # can be forced explicitly
        if level is None:
            if parent is None:
                level = 0
            else:
                level = parent.level + 1
        self.level = level
        self.parent = parent
        # plain name -> mangled identifier ('l_<level>_<name>')
        self.refs = {}
        # mangled identifier -> (VAR_LOAD_*, payload) instruction
        self.loads = {}
        # plain names assigned to in this scope
        self.stores = set()

    def analyze_node(self, node, **kwargs):
        """Populate this table by walking *node* with a RootVisitor."""
        visitor = RootVisitor(self)
        visitor.visit(node, **kwargs)

    def _define_ref(self, name, load=None):
        # create and register the mangled identifier for *name*
        ident = 'l_%d_%s' % (self.level, name)
        self.refs[name] = ident
        if load is not None:
            self.loads[ident] = load
        return ident

    def find_load(self, target):
        """Return the load instruction for *target*, searching parents."""
        if target in self.loads:
            return self.loads[target]
        if self.parent is not None:
            return self.parent.find_load(target)

    def find_ref(self, name):
        """Return the mangled identifier for *name*, searching parents."""
        if name in self.refs:
            return self.refs[name]
        if self.parent is not None:
            return self.parent.find_ref(name)

    def ref(self, name):
        """Like :meth:`find_ref` but raises for unknown names."""
        rv = self.find_ref(name)
        if rv is None:
            raise AssertionError('Tried to resolve a name to a reference that '
                                 'was unknown to the frame (%r)' % name)
        return rv

    def copy(self):
        """Return a copy with independent refs/loads/stores mappings."""
        rv = object.__new__(self.__class__)
        rv.__dict__.update(self.__dict__)
        rv.refs = self.refs.copy()
        rv.loads = self.loads.copy()
        rv.stores = self.stores.copy()
        return rv

    def store(self, name):
        """Record an assignment to *name* in this scope."""
        self.stores.add(name)

        # If we have not seen the name referenced yet, we need to figure
        # out what to set it to.
        if name not in self.refs:
            # If there is a parent scope we check if the name has a
            # reference there.  If it does it means we might have to alias
            # to a variable there.
            if self.parent is not None:
                outer_ref = self.parent.find_ref(name)
                if outer_ref is not None:
                    self._define_ref(name, load=(VAR_LOAD_ALIAS, outer_ref))
                    return

            # Otherwise we can just set it to undefined.
            self._define_ref(name, load=(VAR_LOAD_UNDEFINED, None))

    def declare_parameter(self, name):
        """Record *name* as a parameter of this scope and return its ref."""
        self.stores.add(name)
        return self._define_ref(name, load=(VAR_LOAD_PARAMETER, None))

    def load(self, name):
        """Record a read of *name*; unknown names resolve at runtime."""
        target = self.find_ref(name)
        if target is None:
            self._define_ref(name, load=(VAR_LOAD_RESOLVE, name))

    def branch_update(self, branch_symbols):
        """Merge the symbol tables of conditional branches into this one.

        Names that are not stored by every branch get a fallback load
        (alias to an outer scope, or runtime resolve).
        """
        stores = {}
        for branch in branch_symbols:
            for target in branch.stores:
                if target in self.stores:
                    continue
                stores[target] = stores.get(target, 0) + 1

        for sym in branch_symbols:
            self.refs.update(sym.refs)
            self.loads.update(sym.loads)
            self.stores.update(sym.stores)

        for name, branch_count in iteritems(stores):
            if branch_count == len(branch_symbols):
                continue  # stored in all branches, no fallback needed
            target = self.find_ref(name)
            assert target is not None, 'should not happen'

            if self.parent is not None:
                outer_target = self.parent.find_ref(name)
                if outer_target is not None:
                    self.loads[target] = (VAR_LOAD_ALIAS, outer_target)
                    continue
            self.loads[target] = (VAR_LOAD_RESOLVE, name)

    def dump_stores(self):
        """Map every stored name of this scope and its parents to its ref."""
        rv = {}
        node = self
        while node is not None:
            for name in node.stores:
                if name not in rv:
                    rv[name] = self.find_ref(name)
            node = node.parent
        return rv

    def dump_param_targets(self):
        """Return the set of mangled identifiers loaded as parameters in
        this scope or any parent scope.
        """
        rv = set()
        node = self
        while node is not None:
            # bug fix: inspect the loads of the scope currently being
            # walked (``node``), not always ``self`` — otherwise the
            # parent traversal was a no-op and parameters declared in
            # outer scopes were never reported
            for target, (instr, _) in iteritems(node.loads):
                if instr == VAR_LOAD_PARAMETER:
                    rv.add(target)
            node = node.parent
        return rv
class RootVisitor(NodeVisitor):
    """Dispatches the children of a scope-introducing node to a
    :class:`FrameSymbolVisitor`, which records the symbol usage.
    """

    def __init__(self, symbols):
        self.sym_visitor = FrameSymbolVisitor(symbols)

    def _simple_visit(self, node, **kwargs):
        # default handler: analyze every direct child
        for child in node.iter_child_nodes():
            self.sym_visitor.visit(child)

    # these node types all introduce a scope whose children are analyzed
    # the same straightforward way
    visit_Template = visit_Block = visit_Macro = visit_FilterBlock = \
        visit_Scope = visit_If = visit_ScopedEvalContextModifier = \
        _simple_visit

    def visit_AssignBlock(self, node, **kwargs):
        # only the body belongs to the inner scope, not the target
        for child in node.body:
            self.sym_visitor.visit(child)

    def visit_CallBlock(self, node, **kwargs):
        # the ``call`` expression itself is part of the outer scope
        for child in node.iter_child_nodes(exclude=('call',)):
            self.sym_visitor.visit(child)

    def visit_OverlayScope(self, node, **kwargs):
        for child in node.body:
            self.sym_visitor.visit(child)

    def visit_For(self, node, for_branch='body', **kwargs):
        # ``for_branch`` selects which part of the loop is analyzed; the
        # loop target is a parameter of the body/test branches
        if for_branch == 'body':
            self.sym_visitor.visit(node.target, store_as_param=True)
            branch = node.body
        elif for_branch == 'else':
            branch = node.else_
        elif for_branch == 'test':
            self.sym_visitor.visit(node.target, store_as_param=True)
            if node.test is not None:
                self.sym_visitor.visit(node.test)
            return
        else:
            raise RuntimeError('Unknown for branch')
        for item in branch or ():
            self.sym_visitor.visit(item)

    def visit_With(self, node, **kwargs):
        # with-block targets and body are both part of the inner scope
        for target in node.targets:
            self.sym_visitor.visit(target)
        for child in node.body:
            self.sym_visitor.visit(child)

    def generic_visit(self, node, *args, **kwargs):
        # any other node type is not expected at scope root level
        raise NotImplementedError('Cannot find symbols for %r' %
                                  node.__class__.__name__)
class FrameSymbolVisitor(NodeVisitor): | |
"""A visitor for `Frame.inspect`.""" | |
def __init__(self, symbols): | |
self.symbols = symbols | |
def visit_Name(self, node, store_as_param=False, **kwargs): | |
"""All assignments to names go through this function.""" | |
if store_as_param or node.ctx == 'param': | |
self.symbols.declare_parameter(node.name) | |
elif node.ctx == 'store': | |
self.symbols.store(node.name) | |
elif node.ctx == 'load': | |
self.symbols.load(node.name) | |
def visit_NSRef(self, node, **kwargs): | |
self.symbols.load(node.name) | |
def visit_If(self, node, **kwargs): | |
self.visit(node.test, **kwargs) | |
original_symbols = self.symbols | |
def inner_visit(nodes): | |
self.symbols = rv = original_symbols.copy() | |
for subnode in nodes: | |
self.visit(subnode, **kwargs) | |
self.symbols = original_symbols | |
return rv | |
body_symbols = inner_visit(node.body) | |
elif_symbols = inner_visit(node.elif_) | |
else_symbols = inner_visit(node.else_ or ()) | |
self.symbols.branch_update([body_symbols, elif_symbols, else_symbols]) | |
def visit_Macro(self, node, **kwargs): | |
self.symbols.store(node.name) | |
def visit_Import(self, node, **kwargs): | |
self.generic_visit(node, **kwargs) | |
self.symbols.store(node.target) | |
def visit_FromImport(self, node, **kwargs): | |
self.generic_visit(node, **kwargs) | |
for name in node.names: | |
if isinstance(name, tuple): | |
self.symbols.store(name[1]) | |
else: | |
self.symbols.store(name) | |
def visit_Assign(self, node, **kwargs): | |
"""Visit assignments in the correct order.""" | |
self.visit(node.node, **kwargs) | |
self.visit(node.target, **kwargs) | |
def visit_For(self, node, **kwargs): | |
"""Visiting stops at for blocks. However the block sequence | |
is visited as part of the outer scope. | |
""" | |
self.visit(node.iter, **kwargs) | |
def visit_CallBlock(self, node, **kwargs): | |
self.visit(node.call, **kwargs) | |
def visit_FilterBlock(self, node, **kwargs): | |
self.visit(node.filter, **kwargs) | |
def visit_With(self, node, **kwargs): | |
for target in node.values: | |
self.visit(target) | |
def visit_AssignBlock(self, node, **kwargs):
    """Stop visiting at block assigns."""
    # Only the assignment target is recorded; the block body is a scope
    # of its own.
    self.visit(node.target, **kwargs)
def visit_Scope(self, node, **kwargs):
    """Stop visiting at scopes."""
def visit_Block(self, node, **kwargs):
    """Stop visiting at blocks."""
def visit_OverlayScope(self, node, **kwargs):
    """Do not visit into overlay scopes."""
# -*- coding: utf-8 -*- | |
""" | |
jinja2.lexer | |
~~~~~~~~~~~~ | |
This module implements a Jinja / Python combination lexer. The | |
`Lexer` class provided by this module is used to do some preprocessing | |
for Jinja. | |
On the one hand it filters out invalid operators like the bitshift | |
operators we don't allow in templates. On the other hand it separates | |
template code and python code in expressions. | |
:copyright: (c) 2017 by the Jinja Team. | |
:license: BSD, see LICENSE for more details. | |
""" | |
import re | |
from collections import deque | |
from operator import itemgetter | |
from jinja2._compat import implements_iterator, intern, iteritems, text_type | |
from jinja2.exceptions import TemplateSyntaxError | |
from jinja2.utils import LRUCache | |
# cache for the lexers. Exists in order to be able to have multiple
# environments with the same lexer
_lexer_cache = LRUCache(50)

# static regular expressions shared by all lexer instances
whitespace_re = re.compile(r'\s+', re.U)
string_re = re.compile(r"('([^'\\]*(?:\\.[^'\\]*)*)'"
                       r'|"([^"\\]*(?:\\.[^"\\]*)*)")', re.S)
integer_re = re.compile(r'\d+')

try:
    # check if this Python supports Unicode identifiers
    compile('föö', '<unknown>', 'eval')
except SyntaxError:
    # no Unicode support, use ASCII identifiers
    name_re = re.compile(r'[a-zA-Z_][a-zA-Z0-9_]*')
    check_ident = False
else:
    # Unicode support, build a pattern to match valid characters, and set flag
    # to use str.isidentifier to validate during lexing
    from jinja2 import _identifier
    name_re = re.compile(r'[\w{0}]+'.format(_identifier.pattern))
    check_ident = True
    # remove the (large) pattern module from memory after building the regex
    import sys
    del sys.modules['jinja2._identifier']
    import jinja2
    del jinja2._identifier
    del _identifier

# negative lookbehind keeps e.g. the "42" in "foo.42" from lexing as a float
float_re = re.compile(r'(?<!\.)\d+\.\d+')
newline_re = re.compile(r'(\r\n|\r|\n)')
# intern the token type strings and keep references to them so identity
# comparisons (``is``) can be used throughout the lexer
TOKEN_ADD = intern('add')
TOKEN_ASSIGN = intern('assign')
TOKEN_COLON = intern('colon')
TOKEN_COMMA = intern('comma')
TOKEN_DIV = intern('div')
TOKEN_DOT = intern('dot')
TOKEN_EQ = intern('eq')
TOKEN_FLOORDIV = intern('floordiv')
TOKEN_GT = intern('gt')
TOKEN_GTEQ = intern('gteq')
TOKEN_LBRACE = intern('lbrace')
TOKEN_LBRACKET = intern('lbracket')
TOKEN_LPAREN = intern('lparen')
TOKEN_LT = intern('lt')
TOKEN_LTEQ = intern('lteq')
TOKEN_MOD = intern('mod')
TOKEN_MUL = intern('mul')
TOKEN_NE = intern('ne')
TOKEN_PIPE = intern('pipe')
TOKEN_POW = intern('pow')
TOKEN_RBRACE = intern('rbrace')
TOKEN_RBRACKET = intern('rbracket')
TOKEN_RPAREN = intern('rparen')
TOKEN_SEMICOLON = intern('semicolon')
TOKEN_SUB = intern('sub')
TOKEN_TILDE = intern('tilde')
TOKEN_WHITESPACE = intern('whitespace')
TOKEN_FLOAT = intern('float')
TOKEN_INTEGER = intern('integer')
TOKEN_NAME = intern('name')
TOKEN_STRING = intern('string')
TOKEN_OPERATOR = intern('operator')
TOKEN_BLOCK_BEGIN = intern('block_begin')
TOKEN_BLOCK_END = intern('block_end')
TOKEN_VARIABLE_BEGIN = intern('variable_begin')
TOKEN_VARIABLE_END = intern('variable_end')
TOKEN_RAW_BEGIN = intern('raw_begin')
TOKEN_RAW_END = intern('raw_end')
TOKEN_COMMENT_BEGIN = intern('comment_begin')
TOKEN_COMMENT_END = intern('comment_end')
TOKEN_COMMENT = intern('comment')
TOKEN_LINESTATEMENT_BEGIN = intern('linestatement_begin')
TOKEN_LINESTATEMENT_END = intern('linestatement_end')
TOKEN_LINECOMMENT_BEGIN = intern('linecomment_begin')
TOKEN_LINECOMMENT_END = intern('linecomment_end')
TOKEN_LINECOMMENT = intern('linecomment')
TOKEN_DATA = intern('data')
TOKEN_INITIAL = intern('initial')
TOKEN_EOF = intern('eof')

# bind operators to token types
operators = {
    '+': TOKEN_ADD,
    '-': TOKEN_SUB,
    '/': TOKEN_DIV,
    '//': TOKEN_FLOORDIV,
    '*': TOKEN_MUL,
    '%': TOKEN_MOD,
    '**': TOKEN_POW,
    '~': TOKEN_TILDE,
    '[': TOKEN_LBRACKET,
    ']': TOKEN_RBRACKET,
    '(': TOKEN_LPAREN,
    ')': TOKEN_RPAREN,
    '{': TOKEN_LBRACE,
    '}': TOKEN_RBRACE,
    '==': TOKEN_EQ,
    '!=': TOKEN_NE,
    '>': TOKEN_GT,
    '>=': TOKEN_GTEQ,
    '<': TOKEN_LT,
    '<=': TOKEN_LTEQ,
    '=': TOKEN_ASSIGN,
    '.': TOKEN_DOT,
    ':': TOKEN_COLON,
    '|': TOKEN_PIPE,
    ',': TOKEN_COMMA,
    ';': TOKEN_SEMICOLON
}

# token type -> operator text, used when describing tokens in errors
reverse_operators = dict([(v, k) for k, v in iteritems(operators)])
assert len(operators) == len(reverse_operators), 'operators dropped'
# longest operators first so e.g. '**' wins over '*'
operator_re = re.compile('(%s)' % '|'.join(re.escape(x) for x in
                         sorted(operators, key=lambda x: -len(x))))

# tokens the parser never sees (dropped by Lexer.wrap)
ignored_tokens = frozenset([TOKEN_COMMENT_BEGIN, TOKEN_COMMENT,
                            TOKEN_COMMENT_END, TOKEN_WHITESPACE,
                            TOKEN_LINECOMMENT_BEGIN, TOKEN_LINECOMMENT_END,
                            TOKEN_LINECOMMENT])
# tokens suppressed when their matched text is empty
ignore_if_empty = frozenset([TOKEN_WHITESPACE, TOKEN_DATA,
                             TOKEN_COMMENT, TOKEN_LINECOMMENT])
def _describe_token_type(token_type):
    # Produce a human readable description of a token type for error
    # messages.  Operators map back to their literal text; a handful of
    # structural tokens get a prose description; anything else is
    # returned unchanged.
    operator_text = reverse_operators.get(token_type)
    if operator_text is not None:
        return operator_text
    descriptions = {
        TOKEN_COMMENT_BEGIN: 'begin of comment',
        TOKEN_COMMENT_END: 'end of comment',
        TOKEN_COMMENT: 'comment',
        TOKEN_LINECOMMENT: 'comment',
        TOKEN_BLOCK_BEGIN: 'begin of statement block',
        TOKEN_BLOCK_END: 'end of statement block',
        TOKEN_VARIABLE_BEGIN: 'begin of print statement',
        TOKEN_VARIABLE_END: 'end of print statement',
        TOKEN_LINESTATEMENT_BEGIN: 'begin of line statement',
        TOKEN_LINESTATEMENT_END: 'end of line statement',
        TOKEN_DATA: 'template data / text',
        TOKEN_EOF: 'end of template'
    }
    return descriptions.get(token_type, token_type)
def describe_token(token):
    """Returns a description of the token."""
    # Name tokens are best described by their own value; everything else
    # goes through the generic type description.
    if token.type != 'name':
        return _describe_token_type(token.type)
    return token.value
def describe_token_expr(expr):
    """Like `describe_token` but for token expressions."""
    # A token expression is either a bare type or ``'type:value'``.
    if ':' not in expr:
        return _describe_token_type(expr)
    type, value = expr.split(':', 1)
    if type == 'name':
        return value
    return _describe_token_type(type)
def count_newlines(value):
    """Count the number of newline characters in the string.  This is
    useful for extensions that filter a stream.
    """
    # newline_re matches \r\n, \r, and \n individually, so each match is
    # exactly one logical newline.
    return sum(1 for _ in newline_re.finditer(value))
def compile_rules(environment):
    """Compiles all the rules from the environment into a list of rules."""
    e = re.escape
    # (priority, name, escaped pattern) — priority is the delimiter length
    # so that longer delimiters are tried first (e.g. '<%=' before '<%').
    rules = [
        (len(environment.comment_start_string), 'comment',
         e(environment.comment_start_string)),
        (len(environment.block_start_string), 'block',
         e(environment.block_start_string)),
        (len(environment.variable_start_string), 'variable',
         e(environment.variable_start_string))
    ]
    if environment.line_statement_prefix is not None:
        # line statements must start at the beginning of a line
        rules.append((len(environment.line_statement_prefix), 'linestatement',
                      r'^[ \t\v]*' + e(environment.line_statement_prefix)))
    if environment.line_comment_prefix is not None:
        # line comments may follow other content but not split a word
        rules.append((len(environment.line_comment_prefix), 'linecomment',
                      r'(?:^|(?<=\S))[^\S\r\n]*' +
                      e(environment.line_comment_prefix)))
    # sort by priority descending, drop the priority from the result
    return [x[1:] for x in sorted(rules, reverse=True)]
class Failure(object):
    """Class that raises a `TemplateSyntaxError` if called.
    Used by the `Lexer` to specify known errors.
    """

    def __init__(self, message, cls=TemplateSyntaxError):
        # message: error text; cls: exception class raised on call
        self.message = message
        self.error_class = cls

    def __call__(self, lineno, filename):
        # Invoked by the tokenizer when a failure rule matches.
        raise self.error_class(self.message, lineno, filename)
class Token(tuple):
    """Token class.  An immutable ``(lineno, type, value)`` triple with
    named accessors and helpers for testing against token expressions.
    """
    __slots__ = ()
    # read-only views onto the tuple fields
    lineno, type, value = (property(itemgetter(x)) for x in range(3))

    def __new__(cls, lineno, type, value):
        # intern the type so it can be compared with ``is`` elsewhere
        return tuple.__new__(cls, (lineno, intern(str(type)), value))

    def __str__(self):
        # operators render as their literal text, names as their value
        if self.type in reverse_operators:
            return reverse_operators[self.type]
        elif self.type == 'name':
            return self.value
        return self.type

    def test(self, expr):
        """Test a token against a token expression.  This can either be a
        token type or ``'token_type:token_value'``.  This can only test
        against string values and types.
        """
        # here we do a regular string equality check as test_any is usually
        # passed an iterable of not interned strings.
        if self.type == expr:
            return True
        elif ':' in expr:
            return expr.split(':', 1) == [self.type, self.value]
        return False

    def test_any(self, *iterable):
        """Test against multiple token expressions."""
        for expr in iterable:
            if self.test(expr):
                return True
        return False

    def __repr__(self):
        return 'Token(%r, %r, %r)' % (
            self.lineno,
            self.type,
            self.value
        )
@implements_iterator
class TokenStreamIterator(object):
    """The iterator for tokenstreams.  Iterate over the stream
    until the eof token is reached.
    """

    def __init__(self, stream):
        self.stream = stream

    def __iter__(self):
        return self

    def __next__(self):
        # Yield the current token and advance; close the stream and stop
        # once EOF is the current token.
        token = self.stream.current
        if token.type is TOKEN_EOF:
            self.stream.close()
            raise StopIteration()
        next(self.stream)
        return token
@implements_iterator
class TokenStream(object):
    """A token stream is an iterable that yields :class:`Token`\\s.  The
    parser however does not iterate over it but calls :meth:`next` to go
    one token ahead.  The current active token is stored as :attr:`current`.
    """

    def __init__(self, generator, name, filename):
        self._iter = iter(generator)
        self._pushed = deque()   # tokens pushed back via push()
        self.name = name
        self.filename = filename
        self.closed = False
        # start on a placeholder token, then advance to the real first one
        self.current = Token(1, TOKEN_INITIAL, '')
        next(self)

    def __iter__(self):
        return TokenStreamIterator(self)

    def __bool__(self):
        return bool(self._pushed) or self.current.type is not TOKEN_EOF
    __nonzero__ = __bool__  # py2

    eos = property(lambda x: not x, doc="Are we at the end of the stream?")

    def push(self, token):
        """Push a token back to the stream."""
        self._pushed.append(token)

    def look(self):
        """Look at the next token."""
        # advance, remember the lookahead, push it back, restore current
        old_token = next(self)
        result = self.current
        self.push(result)
        self.current = old_token
        return result

    def skip(self, n=1):
        """Got n tokens ahead."""
        for x in range(n):
            next(self)

    def next_if(self, expr):
        """Perform the token test and return the token if it matched.
        Otherwise the return value is `None`.
        """
        if self.current.test(expr):
            return next(self)

    def skip_if(self, expr):
        """Like :meth:`next_if` but only returns `True` or `False`."""
        return self.next_if(expr) is not None

    def __next__(self):
        """Go one token ahead and return the old one.
        Use the built-in :func:`next` instead of calling this directly.
        """
        rv = self.current
        if self._pushed:
            # pushed-back tokens take precedence over the generator
            self.current = self._pushed.popleft()
        elif self.current.type is not TOKEN_EOF:
            try:
                self.current = next(self._iter)
            except StopIteration:
                self.close()
        return rv

    def close(self):
        """Close the stream."""
        # after closing the stream keeps yielding an EOF token
        self.current = Token(self.current.lineno, TOKEN_EOF, '')
        self._iter = None
        self.closed = True

    def expect(self, expr):
        """Expect a given token type and return it.  This accepts the same
        argument as :meth:`jinja2.lexer.Token.test`.
        """
        if not self.current.test(expr):
            expr = describe_token_expr(expr)
            if self.current.type is TOKEN_EOF:
                raise TemplateSyntaxError('unexpected end of template, '
                                          'expected %r.' % expr,
                                          self.current.lineno,
                                          self.name, self.filename)
            raise TemplateSyntaxError("expected token %r, got %r" %
                                      (expr, describe_token(self.current)),
                                      self.current.lineno,
                                      self.name, self.filename)
        # return the matched token, then advance past it
        try:
            return self.current
        finally:
            next(self)
def get_lexer(environment):
    """Return a lexer which is probably cached."""
    # every environment setting that influences lexing is part of the key,
    # so environments with identical syntax share one Lexer instance
    key = (environment.block_start_string,
           environment.block_end_string,
           environment.variable_start_string,
           environment.variable_end_string,
           environment.comment_start_string,
           environment.comment_end_string,
           environment.line_statement_prefix,
           environment.line_comment_prefix,
           environment.trim_blocks,
           environment.lstrip_blocks,
           environment.newline_sequence,
           environment.keep_trailing_newline)
    lexer = _lexer_cache.get(key)
    if lexer is None:
        lexer = Lexer(environment)
        _lexer_cache[key] = lexer
    return lexer
class Lexer(object):
    """Class that implements a lexer for a given environment.  Automatically
    created by the environment class, usually you don't have to do that.
    Note that the lexer is not automatically bound to an environment.
    Multiple environments can share the same lexer.
    """

    def __init__(self, environment):
        # shortcuts
        c = lambda x: re.compile(x, re.M | re.S)
        e = re.escape

        # lexing rules for tags
        tag_rules = [
            (whitespace_re, TOKEN_WHITESPACE, None),
            (float_re, TOKEN_FLOAT, None),
            (integer_re, TOKEN_INTEGER, None),
            (name_re, TOKEN_NAME, None),
            (string_re, TOKEN_STRING, None),
            (operator_re, TOKEN_OPERATOR, None)
        ]

        # assemble the root lexing rule. because "|" is ungreedy
        # we have to sort by length so that the lexer continues working
        # as expected when we have parsing rules like <% for block and
        # <%= for variables. (if someone wants asp like syntax)
        # variables are just part of the rules if variable processing
        # is required.
        root_tag_rules = compile_rules(environment)

        # block suffix if trimming is enabled
        block_suffix_re = environment.trim_blocks and '\\n?' or ''

        # strip leading spaces if lstrip_blocks is enabled
        prefix_re = {}
        if environment.lstrip_blocks:
            # use '{%+' to manually disable lstrip_blocks behavior
            no_lstrip_re = e('+')
            # detect overlap between block and variable or comment strings
            block_diff = c(r'^%s(.*)' % e(environment.block_start_string))
            # make sure we don't mistake a block for a variable or a comment
            m = block_diff.match(environment.comment_start_string)
            no_lstrip_re += m and r'|%s' % e(m.group(1)) or ''
            m = block_diff.match(environment.variable_start_string)
            no_lstrip_re += m and r'|%s' % e(m.group(1)) or ''

            # detect overlap between comment and variable strings
            comment_diff = c(r'^%s(.*)' % e(environment.comment_start_string))
            m = comment_diff.match(environment.variable_start_string)
            no_variable_re = m and r'(?!%s)' % e(m.group(1)) or ''

            lstrip_re = r'^[ \t]*'
            # either a line-leading tag (stripped) or a plain/'+'-suffixed one
            block_prefix_re = r'%s%s(?!%s)|%s\+?' % (
                lstrip_re,
                e(environment.block_start_string),
                no_lstrip_re,
                e(environment.block_start_string),
            )
            comment_prefix_re = r'%s%s%s|%s\+?' % (
                lstrip_re,
                e(environment.comment_start_string),
                no_variable_re,
                e(environment.comment_start_string),
            )
            prefix_re['block'] = block_prefix_re
            prefix_re['comment'] = comment_prefix_re
        else:
            block_prefix_re = '%s' % e(environment.block_start_string)

        self.newline_sequence = environment.newline_sequence
        self.keep_trailing_newline = environment.keep_trailing_newline

        # global lexing rules: state -> [(regex, token(s), new_state), ...]
        self.rules = {
            'root': [
                # directives
                (c('(.*?)(?:%s)' % '|'.join(
                    [r'(?P<raw_begin>(?:\s*%s\-|%s)\s*raw\s*(?:\-%s\s*|%s))' % (
                        e(environment.block_start_string),
                        block_prefix_re,
                        e(environment.block_end_string),
                        e(environment.block_end_string)
                    )] + [
                        r'(?P<%s_begin>\s*%s\-|%s)' % (n, r, prefix_re.get(n,r))
                        for n, r in root_tag_rules
                    ])), (TOKEN_DATA, '#bygroup'), '#bygroup'),
                # data
                (c('.+'), TOKEN_DATA, None)
            ],
            # comments
            TOKEN_COMMENT_BEGIN: [
                (c(r'(.*?)((?:\-%s\s*|%s)%s)' % (
                    e(environment.comment_end_string),
                    e(environment.comment_end_string),
                    block_suffix_re
                )), (TOKEN_COMMENT, TOKEN_COMMENT_END), '#pop'),
                (c('(.)'), (Failure('Missing end of comment tag'),), None)
            ],
            # blocks
            TOKEN_BLOCK_BEGIN: [
                (c(r'(?:\-%s\s*|%s)%s' % (
                    e(environment.block_end_string),
                    e(environment.block_end_string),
                    block_suffix_re
                )), TOKEN_BLOCK_END, '#pop'),
            ] + tag_rules,
            # variables
            TOKEN_VARIABLE_BEGIN: [
                (c(r'\-%s\s*|%s' % (
                    e(environment.variable_end_string),
                    e(environment.variable_end_string)
                )), TOKEN_VARIABLE_END, '#pop')
            ] + tag_rules,
            # raw block
            TOKEN_RAW_BEGIN: [
                (c(r'(.*?)((?:\s*%s\-|%s)\s*endraw\s*(?:\-%s\s*|%s%s))' % (
                    e(environment.block_start_string),
                    block_prefix_re,
                    e(environment.block_end_string),
                    e(environment.block_end_string),
                    block_suffix_re
                )), (TOKEN_DATA, TOKEN_RAW_END), '#pop'),
                (c('(.)'), (Failure('Missing end of raw directive'),), None)
            ],
            # line statements
            TOKEN_LINESTATEMENT_BEGIN: [
                (c(r'\s*(\n|$)'), TOKEN_LINESTATEMENT_END, '#pop')
            ] + tag_rules,
            # line comments
            TOKEN_LINECOMMENT_BEGIN: [
                (c(r'(.*?)()(?=\n|$)'), (TOKEN_LINECOMMENT,
                 TOKEN_LINECOMMENT_END), '#pop')
            ]
        }

    def _normalize_newlines(self, value):
        """Called for strings and template data to normalize it to unicode."""
        return newline_re.sub(self.newline_sequence, value)

    def tokenize(self, source, name=None, filename=None, state=None):
        """Calls tokeniter + tokenize and wraps it in a token stream.
        """
        stream = self.tokeniter(source, name, filename, state)
        return TokenStream(self.wrap(stream, name, filename), name, filename)

    def wrap(self, stream, name=None, filename=None):
        """This is called with the stream as returned by `tokenize` and wraps
        every token in a :class:`Token` and converts the value.
        """
        for lineno, token, value in stream:
            if token in ignored_tokens:
                continue
            elif token == 'linestatement_begin':
                token = 'block_begin'
            elif token == 'linestatement_end':
                token = 'block_end'
            # we are not interested in those tokens in the parser
            elif token in ('raw_begin', 'raw_end'):
                continue
            elif token == 'data':
                value = self._normalize_newlines(value)
            elif token == 'keyword':
                token = value
            elif token == 'name':
                value = str(value)
                if check_ident and not value.isidentifier():
                    raise TemplateSyntaxError(
                        'Invalid character in identifier',
                        lineno, name, filename)
            elif token == 'string':
                # try to unescape string
                try:
                    value = self._normalize_newlines(value[1:-1]) \
                        .encode('ascii', 'backslashreplace') \
                        .decode('unicode-escape')
                except Exception as e:
                    msg = str(e).split(':')[-1].strip()
                    raise TemplateSyntaxError(msg, lineno, name, filename)
            elif token == 'integer':
                value = int(value)
            elif token == 'float':
                value = float(value)
            elif token == 'operator':
                token = operators[value]
            yield Token(lineno, token, value)

    def tokeniter(self, source, name, filename=None, state=None):
        """This method tokenizes the text and returns the tokens in a
        generator.  Use this method if you just want to tokenize a template.
        """
        source = text_type(source)
        lines = source.splitlines()
        if self.keep_trailing_newline and source:
            # splitlines() drops a trailing newline; re-add it as an
            # empty final line so the join below restores it
            for newline in ('\r\n', '\r', '\n'):
                if source.endswith(newline):
                    lines.append('')
                    break
        source = '\n'.join(lines)
        pos = 0
        lineno = 1
        stack = ['root']
        if state is not None and state != 'root':
            assert state in ('variable', 'block'), 'invalid state'
            stack.append(state + '_begin')
        else:
            state = 'root'
        statetokens = self.rules[stack[-1]]
        source_length = len(source)
        balancing_stack = []

        while 1:
            # tokenizer loop
            for regex, tokens, new_state in statetokens:
                m = regex.match(source, pos)
                # if no match we try again with the next rule
                if m is None:
                    continue
                # we only match blocks and variables if braces / parentheses
                # are balanced. continue parsing with the lower rule which
                # is the operator rule. do this only if the end tags look
                # like operators
                if balancing_stack and \
                   tokens in ('variable_end', 'block_end',
                              'linestatement_end'):
                    continue
                # tuples support more options
                if isinstance(tokens, tuple):
                    for idx, token in enumerate(tokens):
                        # failure group
                        if token.__class__ is Failure:
                            raise token(lineno, filename)
                        # bygroup is a bit more complex, in that case we
                        # yield for the current token the first named
                        # group that matched
                        elif token == '#bygroup':
                            for key, value in iteritems(m.groupdict()):
                                if value is not None:
                                    yield lineno, key, value
                                    lineno += value.count('\n')
                                    break
                            else:
                                raise RuntimeError('%r wanted to resolve '
                                                   'the token dynamically'
                                                   ' but no group matched'
                                                   % regex)
                        # normal group
                        else:
                            data = m.group(idx + 1)
                            if data or token not in ignore_if_empty:
                                yield lineno, token, data
                            lineno += data.count('\n')
                # strings as token just are yielded as it.
                else:
                    data = m.group()
                    # update brace/parentheses balance
                    if tokens == 'operator':
                        if data == '{':
                            balancing_stack.append('}')
                        elif data == '(':
                            balancing_stack.append(')')
                        elif data == '[':
                            balancing_stack.append(']')
                        elif data in ('}', ')', ']'):
                            if not balancing_stack:
                                raise TemplateSyntaxError('unexpected \'%s\'' %
                                                          data, lineno, name,
                                                          filename)
                            expected_op = balancing_stack.pop()
                            if expected_op != data:
                                raise TemplateSyntaxError('unexpected \'%s\', '
                                                          'expected \'%s\'' %
                                                          (data, expected_op),
                                                          lineno, name,
                                                          filename)
                    # yield items
                    if data or tokens not in ignore_if_empty:
                        yield lineno, tokens, data
                    lineno += data.count('\n')
                # fetch new position into new variable so that we can check
                # if there is a internal parsing error which would result
                # in an infinite loop
                pos2 = m.end()
                # handle state changes
                if new_state is not None:
                    # remove the uppermost state
                    if new_state == '#pop':
                        stack.pop()
                    # resolve the new state by group checking
                    elif new_state == '#bygroup':
                        for key, value in iteritems(m.groupdict()):
                            if value is not None:
                                stack.append(key)
                                break
                        else:
                            raise RuntimeError('%r wanted to resolve the '
                                               'new state dynamically but'
                                               ' no group matched' %
                                               regex)
                    # direct state name given
                    else:
                        stack.append(new_state)
                    statetokens = self.rules[stack[-1]]
                # we are still at the same position and no stack change.
                # this means a loop without break condition, avoid that and
                # raise error
                elif pos2 == pos:
                    raise RuntimeError('%r yielded empty string without '
                                       'stack change' % regex)
                # publish new function and start again
                pos = pos2
                break
            # if loop terminated without break we haven't found a single match
            # either we are at the end of the file or we have a problem
            else:
                # end of text
                if pos >= source_length:
                    return
                # something went wrong
                raise TemplateSyntaxError('unexpected char %r at %d' %
                                          (source[pos], pos), lineno,
                                          name, filename)
# -*- coding: utf-8 -*- | |
""" | |
jinja2.loaders | |
~~~~~~~~~~~~~~ | |
Jinja loader classes. | |
:copyright: (c) 2017 by the Jinja Team. | |
:license: BSD, see LICENSE for more details. | |
""" | |
import os | |
import sys | |
import weakref | |
from types import ModuleType | |
from os import path | |
from hashlib import sha1 | |
from jinja2.exceptions import TemplateNotFound | |
from jinja2.utils import open_if_exists, internalcode | |
from jinja2._compat import string_types, iteritems | |
def split_template_path(template):
    """Split a path into segments and perform a sanity check.  If it detects
    '..' in the path it will raise a `TemplateNotFound` error.
    """
    pieces = []
    for piece in template.split('/'):
        # reject OS separators and parent references hidden in a segment
        unsafe = (
            path.sep in piece
            or (path.altsep and path.altsep in piece)
            or piece == path.pardir
        )
        if unsafe:
            raise TemplateNotFound(template)
        if piece and piece != '.':
            pieces.append(piece)
    return pieces
class BaseLoader(object):
    """Baseclass for all loaders.  Subclass this and override `get_source` to
    implement a custom loading mechanism.  The environment provides a
    `get_template` method that calls the loader's `load` method to get the
    :class:`Template` object.
    A very basic example for a loader that looks up templates on the file
    system could look like this::

        from jinja2 import BaseLoader, TemplateNotFound
        from os.path import join, exists, getmtime

        class MyLoader(BaseLoader):

            def __init__(self, path):
                self.path = path

            def get_source(self, environment, template):
                path = join(self.path, template)
                if not exists(path):
                    raise TemplateNotFound(template)
                mtime = getmtime(path)
                with file(path) as f:
                    source = f.read().decode('utf-8')
                return source, path, lambda: mtime == getmtime(path)
    """

    #: if set to `False` it indicates that the loader cannot provide access
    #: to the source of templates.
    #:
    #: .. versionadded:: 2.4
    has_source_access = True

    def get_source(self, environment, template):
        """Get the template source, filename and reload helper for a template.
        It's passed the environment and template name and has to return a
        tuple in the form ``(source, filename, uptodate)`` or raise a
        `TemplateNotFound` error if it can't locate the template.

        The source part of the returned tuple must be the source of the
        template as unicode string or a ASCII bytestring.  The filename should
        be the name of the file on the filesystem if it was loaded from there,
        otherwise `None`.  The filename is used by python for the tracebacks
        if no loader extension is used.

        The last item in the tuple is the `uptodate` function.  If auto
        reloading is enabled it's always called to check if the template
        changed.  No arguments are passed so the function must store the
        old state somewhere (for example in a closure).  If it returns `False`
        the template will be reloaded.
        """
        if not self.has_source_access:
            raise RuntimeError('%s cannot provide access to the source' %
                               self.__class__.__name__)
        raise TemplateNotFound(template)

    def list_templates(self):
        """Iterates over all templates.  If the loader does not support that
        it should raise a :exc:`TypeError` which is the default behavior.
        """
        raise TypeError('this loader cannot iterate over all templates')

    @internalcode
    def load(self, environment, name, globals=None):
        """Loads a template.  This method looks up the template in the cache
        or loads one by calling :meth:`get_source`.  Subclasses should not
        override this method as loaders working on collections of other
        loaders (such as :class:`PrefixLoader` or :class:`ChoiceLoader`)
        will not call this method but `get_source` directly.
        """
        code = None
        if globals is None:
            globals = {}

        # first we try to get the source for this template together
        # with the filename and the uptodate function.
        source, filename, uptodate = self.get_source(environment, name)

        # try to load the code from the bytecode cache if there is a
        # bytecode cache configured.
        bcc = environment.bytecode_cache
        if bcc is not None:
            bucket = bcc.get_bucket(environment, name, filename, source)
            code = bucket.code

        # if we don't have code so far (not cached, no longer up to
        # date) etc. we compile the template
        if code is None:
            code = environment.compile(source, name, filename)

        # if the bytecode cache is available and the bucket doesn't
        # have a code so far, we give the bucket the new code and put
        # it back to the bytecode cache.
        if bcc is not None and bucket.code is None:
            bucket.code = code
            bcc.set_bucket(bucket)

        return environment.template_class.from_code(environment, code,
                                                    globals, uptodate)
class FileSystemLoader(BaseLoader):
    """Loads templates from the file system.  This loader can find templates
    in folders on the file system and is the preferred way to load them.

    The loader takes the path to the templates as string, or if multiple
    locations are wanted a list of them which is then looked up in the
    given order::

        >>> loader = FileSystemLoader('/path/to/templates')
        >>> loader = FileSystemLoader(['/path/to/templates', '/other/path'])

    Per default the template encoding is ``'utf-8'`` which can be changed
    by setting the `encoding` parameter to something else.

    To follow symbolic links, set the *followlinks* parameter to ``True``::

        >>> loader = FileSystemLoader('/path/to/templates', followlinks=True)

    .. versionchanged:: 2.8+
       The *followlinks* parameter was added.
    """

    def __init__(self, searchpath, encoding='utf-8', followlinks=False):
        # a single path string is wrapped into a one-element list
        if isinstance(searchpath, string_types):
            searchpath = [searchpath]
        self.searchpath = list(searchpath)
        self.encoding = encoding
        self.followlinks = followlinks

    def get_source(self, environment, template):
        pieces = split_template_path(template)
        # first search path containing the file wins
        for searchpath in self.searchpath:
            filename = path.join(searchpath, *pieces)
            f = open_if_exists(filename)
            if f is None:
                continue
            try:
                contents = f.read().decode(self.encoding)
            finally:
                f.close()

            mtime = path.getmtime(filename)

            def uptodate():
                # closure over mtime: template is current while the file's
                # modification time is unchanged
                try:
                    return path.getmtime(filename) == mtime
                except OSError:
                    return False
            return contents, filename, uptodate
        raise TemplateNotFound(template)

    def list_templates(self):
        found = set()
        for searchpath in self.searchpath:
            walk_dir = os.walk(searchpath, followlinks=self.followlinks)
            for dirpath, dirnames, filenames in walk_dir:
                for filename in filenames:
                    # make the path relative to the search path and use
                    # forward slashes as template path separators
                    template = os.path.join(dirpath, filename) \
                        [len(searchpath):].strip(os.path.sep) \
                        .replace(os.path.sep, '/')
                    if template[:2] == './':
                        template = template[2:]
                    if template not in found:
                        found.add(template)
        return sorted(found)
class PackageLoader(BaseLoader):
    """Load templates from python eggs or packages.  It is constructed with
    the name of the python package and the path to the templates in that
    package::

        loader = PackageLoader('mypackage', 'views')

    If the package path is not given, ``'templates'`` is assumed.

    Per default the template encoding is ``'utf-8'`` which can be changed
    by setting the `encoding` parameter to something else.  Due to the nature
    of eggs it's only possible to reload templates if the package was loaded
    from the file system and not a zip file.
    """

    def __init__(self, package_name, package_path='templates',
                 encoding='utf-8'):
        from pkg_resources import DefaultProvider, ResourceManager, \
             get_provider
        provider = get_provider(package_name)
        self.encoding = encoding
        self.manager = ResourceManager()
        # DefaultProvider means the package lives on the real file system,
        # which is the only case where auto-reloading can work
        self.filesystem_bound = isinstance(provider, DefaultProvider)
        self.provider = provider
        self.package_path = package_path

    def get_source(self, environment, template):
        pieces = split_template_path(template)
        p = '/'.join((self.package_path,) + tuple(pieces))
        if not self.provider.has_resource(p):
            raise TemplateNotFound(template)

        filename = uptodate = None
        if self.filesystem_bound:
            filename = self.provider.get_resource_filename(self.manager, p)
            mtime = path.getmtime(filename)

            def uptodate():
                # closure over mtime, same pattern as FileSystemLoader
                try:
                    return path.getmtime(filename) == mtime
                except OSError:
                    return False

        source = self.provider.get_resource_string(self.manager, p)
        return source.decode(self.encoding), filename, uptodate

    def list_templates(self):
        path = self.package_path
        if path[:2] == './':
            path = path[2:]
        elif path == '.':
            path = ''
        offset = len(path)
        results = []

        def _walk(path):
            # recursively collect resource names relative to package_path
            for filename in self.provider.resource_listdir(path):
                fullname = path + '/' + filename
                if self.provider.resource_isdir(fullname):
                    _walk(fullname)
                else:
                    results.append(fullname[offset:].lstrip('/'))
        _walk(path)
        results.sort()
        return results
class DictLoader(BaseLoader):
    """Loads templates from a python dict mapping template names to
    unicode source strings.  Handy for unittesting:
    >>> loader = DictLoader({'index.html': 'source here'})
    Auto reloading is rarely useful here, so it is disabled by default.
    """
    def __init__(self, mapping):
        self.mapping = mapping
    def get_source(self, environment, template):
        # Unknown names fail fast with the standard loader exception.
        if template not in self.mapping:
            raise TemplateNotFound(template)
        code = self.mapping[template]
        # Up to date as long as the stored source is still the one we
        # handed out; there is no filename for dict-backed templates.
        return code, None, lambda: code == self.mapping.get(template)
    def list_templates(self):
        # The mapping keys are exactly the available template names.
        return sorted(self.mapping)
class FunctionLoader(BaseLoader):
    """A loader that delegates the actual loading to a function.  The
    function receives the template name and returns either a unicode
    string with the template source, a ``(source, filename, uptodatefunc)``
    tuple, or `None` if the template does not exist.
    >>> def load_template(name):
    ...     if name == 'index.html':
    ...         return '...'
    ...
    >>> loader = FunctionLoader(load_template)
    `uptodatefunc` is called when autoreload is enabled and must return
    `True` while the template is still up to date.  See
    :meth:`BaseLoader.get_source`, which has the same return value.
    """
    def __init__(self, load_func):
        self.load_func = load_func
    def get_source(self, environment, template):
        result = self.load_func(template)
        if result is None:
            raise TemplateNotFound(template)
        if isinstance(result, string_types):
            # Bare source string: no filename, no uptodate callable.
            return result, None, None
        # Already a full (source, filename, uptodate) tuple.
        return result
class PrefixLoader(BaseLoader):
    """A loader that is passed a dict of loaders, each bound to a prefix.
    The prefix is separated from the template name by a slash per default,
    configurable via the `delimiter` argument::
        loader = PrefixLoader({
            'app1':     PackageLoader('mypackage.app1'),
            'app2':     PackageLoader('mypackage.app2')
        })
    Loading ``'app1/index.html'`` reads the file from the app1 package,
    ``'app2/index.html'`` from the second.
    """
    def __init__(self, mapping, delimiter='/'):
        self.mapping = mapping
        self.delimiter = delimiter
    def get_loader(self, template):
        # A name without the delimiter cannot be routed to any loader.
        try:
            prefix, name = template.split(self.delimiter, 1)
        except ValueError:
            raise TemplateNotFound(template)
        # An unknown prefix is reported as the full template name too.
        try:
            return self.mapping[prefix], name
        except KeyError:
            raise TemplateNotFound(template)
    def get_source(self, environment, template):
        loader, name = self.get_loader(template)
        try:
            return loader.get_source(environment, name)
        except TemplateNotFound:
            # re-raise with the prefixed name so the caller sees the
            # name it actually asked for.
            raise TemplateNotFound(template)
    @internalcode
    def load(self, environment, name, globals=None):
        loader, local_name = self.get_loader(name)
        try:
            return loader.load(environment, local_name, globals)
        except TemplateNotFound:
            # re-raise with the prefixed name so the caller sees the
            # name it actually asked for.
            raise TemplateNotFound(name)
    def list_templates(self):
        # Re-attach each loader's prefix to the names it reports.
        return [prefix + self.delimiter + name
                for prefix, loader in iteritems(self.mapping)
                for name in loader.list_templates()]
class ChoiceLoader(BaseLoader):
    """Works like `PrefixLoader` but without a prefix: every loader in
    the list is tried in order until one finds the template.
    >>> loader = ChoiceLoader([
    ...     FileSystemLoader('/path/to/user/templates'),
    ...     FileSystemLoader('/path/to/system/templates')
    ... ])
    Useful to let users override builtin templates from another location.
    """
    def __init__(self, loaders):
        self.loaders = loaders
    def get_source(self, environment, template):
        for candidate in self.loaders:
            try:
                return candidate.get_source(environment, template)
            except TemplateNotFound:
                # This loader does not have it; try the next one.
                continue
        raise TemplateNotFound(template)
    @internalcode
    def load(self, environment, name, globals=None):
        for candidate in self.loaders:
            try:
                return candidate.load(environment, name, globals)
            except TemplateNotFound:
                # This loader does not have it; try the next one.
                continue
        raise TemplateNotFound(name)
    def list_templates(self):
        # Union of all loaders' template names, deduplicated and sorted.
        names = set()
        for candidate in self.loaders:
            names.update(candidate.list_templates())
        return sorted(names)
class _TemplateModule(ModuleType):
    """Like a normal module but with support for weak references.
    ModuleLoader keeps only a ``weakref.proxy`` to instances of this
    class in ``sys.modules`` so the fake template package can be
    garbage collected together with the loader that created it.
    """
class ModuleLoader(BaseLoader):
    """This loader loads templates from precompiled templates.
    Example usage:
    >>> loader = ChoiceLoader([
    ...     ModuleLoader('/path/to/compiled/templates'),
    ...     FileSystemLoader('/path/to/templates')
    ... ])
    Templates can be precompiled with :meth:`Environment.compile_templates`.
    """
    # Compiled modules carry no template source, so the environment
    # must not ask this loader for it.
    has_source_access = False
    def __init__(self, path):
        # Unique per-instance package name so several ModuleLoaders can
        # coexist without colliding in sys.modules.
        package_name = '_jinja2_module_templates_%x' % id(self)
        # create a fake module that looks for the templates in the
        # path given.
        mod = _TemplateModule(package_name)
        if isinstance(path, string_types):
            path = [path]
        else:
            path = list(path)
        mod.__path__ = path
        sys.modules[package_name] = weakref.proxy(mod,
            lambda x: sys.modules.pop(package_name, None))
        # the only strong reference, the sys.modules entry is weak
        # so that the garbage collector can remove it once the
        # loader that created it goes out of business.
        self.module = mod
        self.package_name = package_name
    @staticmethod
    def get_template_key(name):
        # Hash the template name so any name maps to a valid module name.
        return 'tmpl_' + sha1(name.encode('utf-8')).hexdigest()
    @staticmethod
    def get_module_filename(name):
        # File name the precompiled template is stored under.
        return ModuleLoader.get_template_key(name) + '.py'
    @internalcode
    def load(self, environment, name, globals=None):
        key = self.get_template_key(name)
        module = '%s.%s' % (self.package_name, key)
        # NOTE(review): this getattr uses the full dotted name as the
        # attribute, so it appears to act purely as a cache miss on
        # first load -- confirm whether a per-key cache was intended.
        mod = getattr(self.module, module, None)
        if mod is None:
            try:
                mod = __import__(module, None, None, ['root'])
            except ImportError:
                raise TemplateNotFound(name)
            # remove the entry from sys.modules, we only want the attribute
            # on the module object we have stored on the loader.
            sys.modules.pop(module, None)
        return environment.template_class.from_module_dict(
            environment, mod.__dict__, globals)
# -*- coding: utf-8 -*- | |
""" | |
jinja2.meta | |
~~~~~~~~~~~ | |
This module implements various functions that expose information about | 
templates that might be interesting for various kinds of applications. | |
:copyright: (c) 2017 by the Jinja Team, see AUTHORS for more details. | |
:license: BSD, see LICENSE for more details. | |
""" | |
from jinja2 import nodes | |
from jinja2.compiler import CodeGenerator | |
from jinja2._compat import string_types, iteritems | |
class TrackingCodeGenerator(CodeGenerator):
    """A code generator used purely for introspection: it emits nothing
    and only records which identifiers had to be resolved from the
    context.
    """
    def __init__(self, environment):
        CodeGenerator.__init__(self, environment, '<introspection>',
                               '<introspection>')
        # Names that will be looked up in the context at runtime.
        self.undeclared_identifiers = set()
    def write(self, x):
        """Discard all generated code."""
    def enter_frame(self, frame):
        """Remember all undeclared identifiers."""
        CodeGenerator.enter_frame(self, frame)
        for target, load in iteritems(frame.symbols.loads):
            action, param = load
            if action == 'resolve':
                self.undeclared_identifiers.add(param)
def find_undeclared_variables(ast):
    """Returns a set of all variables in the AST that will be looked up
    from the context at runtime.  Because at compile time it's not known
    which variables will be used depending on the path the execution takes
    at runtime, all variables are returned.
    >>> from jinja2 import Environment, meta
    >>> env = Environment()
    >>> ast = env.parse('{% set foo = 42 %}{{ bar + foo }}')
    >>> meta.find_undeclared_variables(ast) == set(['bar'])
    True
    .. admonition:: Implementation
       Internally the code generator is used for finding undeclared
       variables.  This is good to know because the code generator might
       raise a :exc:`TemplateAssertionError` during compilation and as a
       matter of fact this function can currently raise that exception as
       well.
    """
    # Walking the whole AST with the tracking generator records every
    # name that had to be resolved from the context.
    tracker = TrackingCodeGenerator(ast.environment)
    tracker.visit(ast)
    return tracker.undeclared_identifiers
def find_referenced_templates(ast):
    """Finds all the referenced templates from the AST. This will return an
    iterator over all the hardcoded template extensions, inclusions and
    imports. If dynamic inheritance or inclusion is used, `None` will be
    yielded.
    >>> from jinja2 import Environment, meta
    >>> env = Environment()
    >>> ast = env.parse('{% extends "layout.html" %}{% include helper %}')
    >>> list(meta.find_referenced_templates(ast))
    ['layout.html', None]
    This function is useful for dependency tracking. For example if you want
    to rebuild parts of the website after a layout template has changed.
    :param ast: a parsed template (the node tree returned by
                :meth:`Environment.parse`).
    """
    # These are all node types whose ``template`` attribute can pull in
    # another template.
    for node in ast.find_all((nodes.Extends, nodes.FromImport, nodes.Import,
                              nodes.Include)):
        if not isinstance(node.template, nodes.Const):
            # a tuple with some non consts in there
            if isinstance(node.template, (nodes.Tuple, nodes.List)):
                for template_name in node.template.items:
                    # something const, only yield the strings and ignore
                    # non-string consts that really just make no sense
                    if isinstance(template_name, nodes.Const):
                        if isinstance(template_name.value, string_types):
                            yield template_name.value
                    # something dynamic in there
                    else:
                        yield None
            # something dynamic we don't know about here
            else:
                yield None
            continue
        # constant is a basestring, direct template name
        if isinstance(node.template.value, string_types):
            yield node.template.value
        # a tuple or list (latter *should* not happen) made of consts,
        # yield the consts that are strings.  We could warn here for
        # non string values
        elif isinstance(node, nodes.Include) and \
           isinstance(node.template.value, (tuple, list)):
            for template_name in node.template.value:
                if isinstance(template_name, string_types):
                    yield template_name
        # something else we don't care about, we could warn here
        else:
            yield None
import sys | |
from ast import literal_eval | |
from itertools import islice, chain | |
from jinja2 import nodes | |
from jinja2._compat import text_type | |
from jinja2.compiler import CodeGenerator, has_safe_repr | |
from jinja2.environment import Environment, Template | |
from jinja2.utils import concat, escape | |
def native_concat(nodes):
    """Return a native Python type from the list of compiled nodes. If
    the result is a single node, its value is returned. Otherwise, the
    nodes are concatenated as strings. If the result can be parsed with
    :func:`ast.literal_eval`, the parsed value is returned. Otherwise,
    the string is returned.
    """
    # Pull at most two items so we can tell apart the empty, single and
    # multi-node cases without consuming the whole iterable up front.
    first_two = list(islice(nodes, 2))
    if not first_two:
        return None
    if len(first_two) == 1:
        raw = first_two[0]
    else:
        # Stitch the consumed prefix back onto the rest of the iterable
        # and join everything as text.
        raw = u''.join(text_type(piece) for piece in chain(first_two, nodes))
    try:
        return literal_eval(raw)
    except (ValueError, SyntaxError, MemoryError):
        # Not a Python literal: hand back the value/string unchanged.
        return raw
class NativeCodeGenerator(CodeGenerator):
    """A code generator which avoids injecting ``to_string()`` calls around the
    internal code Jinja uses to render templates.
    """
    def visit_Output(self, node, frame):
        """Same as :meth:`CodeGenerator.visit_Output`, but do not call
        ``to_string`` on output nodes in generated code.
        """
        # Inside a known {% extends %} the child body's output is
        # discarded, so nothing needs to be emitted.
        if self.has_known_extends and frame.require_output_check:
            return
        finalize = self.environment.finalize
        # A finalize callable may be flagged as wanting the context,
        # the eval context or the environment as its first argument.
        finalize_context = getattr(finalize, 'contextfunction', False)
        finalize_eval = getattr(finalize, 'evalcontextfunction', False)
        finalize_env = getattr(finalize, 'environmentfunction', False)
        if finalize is not None:
            if finalize_context or finalize_eval:
                # Needs runtime state: constants cannot be folded at
                # compile time (const_finalize None disables folding).
                const_finalize = None
            elif finalize_env:
                def const_finalize(x):
                    return finalize(self.environment, x)
            else:
                const_finalize = finalize
        else:
            # No finalize configured: constants pass through unchanged.
            def const_finalize(x):
                return x
        # If we are inside a frame that requires output checking, we do so.
        outdent_later = False
        if frame.require_output_check:
            self.writeline('if parent_template is None:')
            self.indent()
            outdent_later = True
        # Try to evaluate as many chunks as possible into a static string at
        # compile time.
        body = []
        for child in node.nodes:
            try:
                if const_finalize is None:
                    raise nodes.Impossible()
                const = child.as_const(frame.eval_ctx)
                if not has_safe_repr(const):
                    raise nodes.Impossible()
            except nodes.Impossible:
                # Not constant-foldable: keep the node for runtime
                # evaluation.
                body.append(child)
                continue
            # the frame can't be volatile here, because otherwise the as_const
            # function would raise an Impossible exception at that point
            try:
                if frame.eval_ctx.autoescape:
                    if hasattr(const, '__html__'):
                        const = const.__html__()
                    else:
                        const = escape(const)
                const = const_finalize(const)
            except Exception:
                # if something goes wrong here we evaluate the node at runtime
                # for easier debugging
                body.append(child)
                continue
            # Runs of consecutive constants are collected into one list so
            # they can later be concatenated into a single literal.
            if body and isinstance(body[-1], list):
                body[-1].append(const)
            else:
                body.append([const])
        # if we have less than 3 nodes or a buffer we yield or extend/append
        if len(body) < 3 or frame.buffer is not None:
            if frame.buffer is not None:
                # for one item we append, for more we extend
                if len(body) == 1:
                    self.writeline('%s.append(' % frame.buffer)
                else:
                    self.writeline('%s.extend((' % frame.buffer)
                self.indent()
            for item in body:
                if isinstance(item, list):
                    # Constant run: emit the repr of its native-concatenated
                    # value directly into the generated code.
                    val = repr(native_concat(item))
                    if frame.buffer is None:
                        self.writeline('yield ' + val)
                    else:
                        self.writeline(val + ',')
                else:
                    if frame.buffer is None:
                        self.writeline('yield ', item)
                    else:
                        self.newline(item)
                    close = 0
                    # NOTE(review): unlike the format-string branch below,
                    # this branch only forwards ``context`` for context
                    # finalizers and never eval_ctx/environment -- confirm
                    # this asymmetry is intended.
                    if finalize is not None:
                        self.write('environment.finalize(')
                        if finalize_context:
                            self.write('context, ')
                        close += 1
                    self.visit(item, frame)
                    if close > 0:
                        self.write(')' * close)
                    if frame.buffer is not None:
                        self.write(',')
            if frame.buffer is not None:
                # close the open parentheses
                self.outdent()
                self.writeline(len(body) == 1 and ')' or '))')
        # otherwise we create a format string as this is faster in that case
        else:
            format = []
            arguments = []
            for item in body:
                if isinstance(item, list):
                    # Escape literal percent signs for the format string.
                    format.append(native_concat(item).replace('%', '%%'))
                else:
                    format.append('%s')
                    arguments.append(item)
            self.writeline('yield ')
            self.write(repr(concat(format)) + ' % (')
            self.indent()
            for argument in arguments:
                self.newline(argument)
                close = 0
                if finalize is not None:
                    self.write('environment.finalize(')
                    if finalize_context:
                        self.write('context, ')
                    elif finalize_eval:
                        self.write('context.eval_ctx, ')
                    elif finalize_env:
                        self.write('environment, ')
                    close += 1
                self.visit(argument, frame)
                self.write(')' * close + ', ')
            self.outdent()
            self.writeline(')')
        if outdent_later:
            self.outdent()
class NativeTemplate(Template):
    def render(self, *args, **kwargs):
        """Render the template to produce a native Python type. If the result
        is a single node, its value is returned. Otherwise, the nodes are
        concatenated as strings. If the result can be parsed with
        :func:`ast.literal_eval`, the parsed value is returned. Otherwise, the
        string is returned.
        """
        variables = dict(*args, **kwargs)
        try:
            # Consuming the render generator happens inside the try so
            # template runtime errors are routed through the environment.
            stream = self.root_render_func(self.new_context(variables))
            return native_concat(stream)
        except Exception:
            return self.environment.handle_exception(sys.exc_info(), True)
class NativeEnvironment(Environment):
    """An environment that renders templates to native Python types."""
    # Swap in the generator and template classes defined above so
    # rendered output skips to_string() and keeps its Python type.
    code_generator_class = NativeCodeGenerator
    template_class = NativeTemplate
..\markupsafe\__init__.py | |
..\markupsafe\__pycache__\__init__.cpython-37.pyc | |
..\markupsafe\__pycache__\_compat.cpython-37.pyc | |
..\markupsafe\__pycache__\_constants.cpython-37.pyc | |
..\markupsafe\__pycache__\_native.cpython-37.pyc | |
..\markupsafe\_compat.py | |
..\markupsafe\_constants.py | |
..\markupsafe\_native.py | |
..\markupsafe\_speedups.c | |
PKG-INFO | |
SOURCES.txt | |
dependency_links.txt | |
not-zip-safe | |
top_level.txt |
Metadata-Version: 1.1 | |
Name: MarkupSafe | |
Version: 1.0 | |
Summary: Implements a XML/HTML/XHTML Markup safe string for Python | |
Home-page: http://github.com/pallets/markupsafe | |
Author: Armin Ronacher | |
Author-email: armin.ronacher@active-4.com | |
License: BSD | |
Description: MarkupSafe | |
========== | |
Implements a unicode subclass that supports HTML strings: | |
.. code-block:: python | |
>>> from markupsafe import Markup, escape | |
>>> escape("<script>alert(document.cookie);</script>") | |
Markup(u'&lt;script&gt;alert(document.cookie);&lt;/script&gt;') | 
>>> tmpl = Markup("<em>%s</em>") | |
>>> tmpl % "Peter > Lustig" | |
Markup(u'<em>Peter &gt; Lustig</em>') | 
If you want to make an object unicode that is not yet unicode | |
but don't want to lose the taint information, you can use the | |
``soft_unicode`` function. (On Python 3 you can also use ``soft_str`` which | |
is a different name for the same function). | |
.. code-block:: python | |
>>> from markupsafe import soft_unicode | |
>>> soft_unicode(42) | |
u'42' | |
>>> soft_unicode(Markup('foo')) | |
Markup(u'foo') | |
HTML Representations | |
-------------------- | |
Objects can customize their HTML markup equivalent by overriding | |
the ``__html__`` function: | |
.. code-block:: python | |
>>> class Foo(object): | |
... def __html__(self): | |
... return '<strong>Nice</strong>' | |
... | |
>>> escape(Foo()) | |
Markup(u'<strong>Nice</strong>') | |
>>> Markup(Foo()) | |
Markup(u'<strong>Nice</strong>') | |
Silent Escapes | |
-------------- | |
Since MarkupSafe 0.10 there is now also a separate escape function | |
called ``escape_silent`` that returns an empty string for ``None`` for | |
consistency with other systems that return empty strings for ``None`` | |
when escaping (for instance Pylons' webhelpers). | |
If you also want to use this for the escape method of the Markup | |
object, you can create your own subclass that does that: | |
.. code-block:: python | |
from markupsafe import Markup, escape_silent as escape | |
class SilentMarkup(Markup): | |
__slots__ = () | |
@classmethod | |
def escape(cls, s): | |
return cls(escape(s)) | |
New-Style String Formatting | |
--------------------------- | |
Starting with MarkupSafe 0.21 new style string formats from Python 2.6 and | |
3.x are now fully supported. Previously the escape behavior of those | |
functions was spotty at best. The new implementations operates under the | |
following algorithm: | |
1. if an object has an ``__html_format__`` method it is called as | |
replacement for ``__format__`` with the format specifier. It either | |
has to return a string or markup object. | |
2. if an object has an ``__html__`` method it is called. | |
3. otherwise the default format system of Python kicks in and the result | |
is HTML escaped. | |
Here is how you can implement your own formatting: | |
.. code-block:: python | |
class User(object): | |
def __init__(self, id, username): | |
self.id = id | |
self.username = username | |
def __html_format__(self, format_spec): | |
if format_spec == 'link': | |
return Markup('<a href="/user/{0}">{1}</a>').format( | |
self.id, | |
self.__html__(), | |
) | |
elif format_spec: | |
raise ValueError('Invalid format spec') | |
return self.__html__() | |
def __html__(self): | |
return Markup('<span class=user>{0}</span>').format(self.username) | |
And to format that user: | |
.. code-block:: python | |
>>> user = User(1, 'foo') | |
>>> Markup('<p>User: {0:link}').format(user) | |
Markup(u'<p>User: <a href="/user/1"><span class=user>foo</span></a>') | |
Markupsafe supports Python 2.6, 2.7 and Python 3.3 and higher. | |
Platform: UNKNOWN | |
Classifier: Development Status :: 5 - Production/Stable | |
Classifier: Environment :: Web Environment | |
Classifier: Intended Audience :: Developers | |
Classifier: License :: OSI Approved :: BSD License | |
Classifier: Operating System :: OS Independent | |
Classifier: Programming Language :: Python | |
Classifier: Programming Language :: Python :: 3 | |
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content | |
Classifier: Topic :: Software Development :: Libraries :: Python Modules | |
Classifier: Topic :: Text Processing :: Markup :: HTML |
AUTHORS | |
CHANGES | |
LICENSE | |
MANIFEST.in | |
README.rst | |
setup.cfg | |
setup.py | |
tests.py | |
MarkupSafe.egg-info/PKG-INFO | |
MarkupSafe.egg-info/SOURCES.txt | |
MarkupSafe.egg-info/dependency_links.txt | |
MarkupSafe.egg-info/not-zip-safe | |
MarkupSafe.egg-info/top_level.txt | |
markupsafe/__init__.py | |
markupsafe/_compat.py | |
markupsafe/_constants.py | |
markupsafe/_native.py | |
markupsafe/_speedups.c |
markupsafe |
Werkzeug is a comprehensive WSGI web application library. It began as a simple collection of various utilities for WSGI applications and has become one of the most advanced WSGI utility libraries.
It includes:
- An interactive debugger that allows inspecting stack traces and source code in the browser with an interactive interpreter for any frame in the stack.
- A full-featured request object with objects to interact with headers, query args, form data, files, and cookies.
- A response object that can wrap other WSGI applications and handle streaming data.
- A routing system for matching URLs to endpoints and generating URLs for endpoints, with an extensible system for capturing variables from URLs.
- HTTP utilities to handle entity tags, cache control, dates, user agents, cookies, files, and more.
- A threaded WSGI server for use while developing applications locally.
- A test client for simulating HTTP requests during testing without requiring running a server.
Werkzeug is Unicode aware and doesn't enforce any dependencies. It is up to the developer to choose a template engine, database adapter, and even how to handle requests. It can be used to build all sorts of end user applications such as blogs, wikis, or bulletin boards.
Flask wraps Werkzeug, using it to handle the details of WSGI while providing more structure and patterns for defining powerful applications.
Install and update using pip:
pip install -U Werkzeug
from werkzeug.wrappers import Request, Response
@Request.application
def application(request):
return Response('Hello, World!')
if __name__ == '__main__':
from werkzeug.serving import run_simple
run_simple('localhost', 4000, application)
- Website: https://www.palletsprojects.com/p/werkzeug/
- Releases: https://pypi.org/project/Werkzeug/
- Code: https://github.com/pallets/werkzeug
- Issue tracker: https://github.com/pallets/werkzeug/issues
- Test status:
- Linux, Mac: https://travis-ci.org/pallets/werkzeug
- Windows: https://ci.appveyor.com/project/davidism/werkzeug
- Test coverage: https://codecov.io/gh/pallets/werkzeug
pip |
Copyright © 2007 by the Pallets team. | |
Some rights reserved. | |
Redistribution and use in source and binary forms, with or without | |
modification, are permitted provided that the following conditions are | |
met: | |
* Redistributions of source code must retain the above copyright notice, | |
this list of conditions and the following disclaimer. | |
* Redistributions in binary form must reproduce the above copyright | |
notice, this list of conditions and the following disclaimer in the | |
documentation and/or other materials provided with the distribution. | |
* Neither the name of the copyright holder nor the names of its | |
contributors may be used to endorse or promote products derived from | |
this software without specific prior written permission. | |
THIS SOFTWARE AND DOCUMENTATION IS PROVIDED BY THE COPYRIGHT HOLDERS AND | |
CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, | |
BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND | |
FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE | |
COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, | |
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT | |
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF | |
USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON | |
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | |
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF | |
THIS SOFTWARE AND DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF | |
SUCH DAMAGE. |
Metadata-Version: 2.0 | |
Name: Werkzeug | |
Version: 0.14.1 | |
Summary: The comprehensive WSGI web application library. | |
Home-page: https://www.palletsprojects.org/p/werkzeug/ | |
Author: Armin Ronacher | |
Author-email: armin.ronacher@active-4.com | |
License: BSD | |
Description-Content-Type: UNKNOWN | |
Platform: any | |
Classifier: Development Status :: 5 - Production/Stable | |
Classifier: Environment :: Web Environment | |
Classifier: Intended Audience :: Developers | |
Classifier: License :: OSI Approved :: BSD License | |
Classifier: Operating System :: OS Independent | |
Classifier: Programming Language :: Python | |
Classifier: Programming Language :: Python :: 2 | |
Classifier: Programming Language :: Python :: 2.6 | |
Classifier: Programming Language :: Python :: 2.7 | |
Classifier: Programming Language :: Python :: 3 | |
Classifier: Programming Language :: Python :: 3.3 | |
Classifier: Programming Language :: Python :: 3.4 | |
Classifier: Programming Language :: Python :: 3.5 | |
Classifier: Programming Language :: Python :: 3.6 | |
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content | |
Classifier: Topic :: Software Development :: Libraries :: Python Modules | |
Provides-Extra: dev | |
Requires-Dist: coverage; extra == 'dev' | |
Requires-Dist: pytest; extra == 'dev' | |
Requires-Dist: sphinx; extra == 'dev' | |
Requires-Dist: tox; extra == 'dev' | |
Provides-Extra: termcolor | |
Requires-Dist: termcolor; extra == 'termcolor' | |
Provides-Extra: watchdog | |
Requires-Dist: watchdog; extra == 'watchdog' | |
Werkzeug | |
======== | |
Werkzeug is a comprehensive `WSGI`_ web application library. It began as | |
a simple collection of various utilities for WSGI applications and has | |
become one of the most advanced WSGI utility libraries. | |
It includes: | |
* An interactive debugger that allows inspecting stack traces and source | |
code in the browser with an interactive interpreter for any frame in | |
the stack. | |
* A full-featured request object with objects to interact with headers, | |
query args, form data, files, and cookies. | |
* A response object that can wrap other WSGI applications and handle | |
streaming data. | |
* A routing system for matching URLs to endpoints and generating URLs | |
for endpoints, with an extensible system for capturing variables from | |
URLs. | |
* HTTP utilities to handle entity tags, cache control, dates, user | |
agents, cookies, files, and more. | |
* A threaded WSGI server for use while developing applications locally. | |
* A test client for simulating HTTP requests during testing without | |
requiring running a server. | |
Werkzeug is Unicode aware and doesn't enforce any dependencies. It is up | |
to the developer to choose a template engine, database adapter, and even | |
how to handle requests. It can be used to build all sorts of end user | |
applications such as blogs, wikis, or bulletin boards. | |
`Flask`_ wraps Werkzeug, using it to handle the details of WSGI while | |
providing more structure and patterns for defining powerful | |
applications. | |
Installing | |
---------- | |
Install and update using `pip`_: | |
.. code-block:: text | |
pip install -U Werkzeug | |
A Simple Example | |
---------------- | |
.. code-block:: python | |
from werkzeug.wrappers import Request, Response | |
@Request.application | |
def application(request): | |
return Response('Hello, World!') | |
if __name__ == '__main__': | |
from werkzeug.serving import run_simple | |
run_simple('localhost', 4000, application) | |
Links | |
----- | |
* Website: https://www.palletsprojects.com/p/werkzeug/ | |
* Releases: https://pypi.org/project/Werkzeug/ | |
* Code: https://github.com/pallets/werkzeug | |
* Issue tracker: https://github.com/pallets/werkzeug/issues | |
* Test status: | |
* Linux, Mac: https://travis-ci.org/pallets/werkzeug | |
* Windows: https://ci.appveyor.com/project/davidism/werkzeug | |
* Test coverage: https://codecov.io/gh/pallets/werkzeug | |
.. _WSGI: https://wsgi.readthedocs.io/en/latest/ | |
.. _Flask: https://www.palletsprojects.com/p/flask/ | |
.. _pip: https://pip.pypa.io/en/stable/quickstart/ | |
{"generator": "bdist_wheel (0.26.0)", "summary": "The comprehensive WSGI web application library.", "classifiers": ["Development Status :: 5 - Production/Stable", "Environment :: Web Environment", "Intended Audience :: Developers", "License :: OSI Approved :: BSD License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Topic :: Internet :: WWW/HTTP :: Dynamic Content", "Topic :: Software Development :: Libraries :: Python Modules"], "description_content_type": "UNKNOWN", "extensions": {"python.details": {"project_urls": {"Home": "https://www.palletsprojects.org/p/werkzeug/"}, "contacts": [{"email": "armin.ronacher@active-4.com", "name": "Armin Ronacher", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst", "license": "LICENSE.txt"}}}, "license": "BSD", "metadata_version": "2.0", "name": "Werkzeug", "platform": "any", "extras": ["dev", "termcolor", "watchdog"], "run_requires": [{"requires": ["coverage", "pytest", "sphinx", "tox"], "extra": "dev"}, {"requires": ["termcolor"], "extra": "termcolor"}, {"requires": ["watchdog"], "extra": "watchdog"}], "version": "0.14.1"} |
Werkzeug-0.14.1.dist-info/DESCRIPTION.rst,sha256=rOCN36jwsWtWsTpqPG96z7FMilB5qI1CIARSKRuUmz8,2452 | |
Werkzeug-0.14.1.dist-info/LICENSE.txt,sha256=xndz_dD4m269AF9l_Xbl5V3tM1N3C1LoZC2PEPxWO-8,1534 | |
Werkzeug-0.14.1.dist-info/METADATA,sha256=FbfadrPdJNUWAxMOKxGUtHe5R3IDSBKYYmAz3FvI3uY,3872 | |
Werkzeug-0.14.1.dist-info/RECORD,, | |
Werkzeug-0.14.1.dist-info/WHEEL,sha256=GrqQvamwgBV4nLoJe0vhYRSWzWsx7xjlt74FT0SWYfE,110 | |
Werkzeug-0.14.1.dist-info/metadata.json,sha256=4489UTt6HBp2NQil95-pBkjU4Je93SMHvMxZ_rjOpqA,1452 | |
Werkzeug-0.14.1.dist-info/top_level.txt,sha256=QRyj2VjwJoQkrwjwFIOlB8Xg3r9un0NtqVHQF-15xaw,9 | |
werkzeug/__init__.py,sha256=NR0d4n_-U9BLVKlOISean3zUt2vBwhvK-AZE6M0sC0k,6842 | |
werkzeug/_compat.py,sha256=8c4U9o6A_TR9nKCcTbpZNxpqCXcXDVIbFawwKM2s92c,6311 | |
werkzeug/_internal.py,sha256=GhEyGMlsSz_tYjsDWO9TG35VN7304MM8gjKDrXLEdVc,13873 | |
werkzeug/_reloader.py,sha256=AyPphcOHPbu6qzW0UbrVvTDJdre5WgpxbhIJN_TqzUc,9264 | |
werkzeug/datastructures.py,sha256=3IgNKNqrz-ZjmAG7y3YgEYK-enDiMT_b652PsypWcYg,90080 | |
werkzeug/exceptions.py,sha256=3wp95Hqj9FqV8MdikV99JRcHse_fSMn27V8tgP5Hw2c,20505 | |
werkzeug/filesystem.py,sha256=hHWeWo_gqLMzTRfYt8-7n2wWcWUNTnDyudQDLOBEICE,2175 | |
werkzeug/formparser.py,sha256=mUuCwjzjb8_E4RzrAT2AioLuZSYpqR1KXTK6LScRYzA,21722 | |
werkzeug/http.py,sha256=RQg4MJuhRv2isNRiEh__Phh09ebpfT3Kuu_GfrZ54_c,40079 | |
werkzeug/local.py,sha256=QdQhWV5L8p1Y1CJ1CDStwxaUs24SuN5aebHwjVD08C8,14553 | |
werkzeug/posixemulation.py,sha256=xEF2Bxc-vUCPkiu4IbfWVd3LW7DROYAT-ExW6THqyzw,3519 | |
werkzeug/routing.py,sha256=2JVtdSgxKGeANy4Z_FP-dKESvKtkYGCZ1J2fARCLGCY,67214 | |
werkzeug/script.py,sha256=DwaVDcXdaOTffdNvlBdLitxWXjKaRVT32VbhDtljFPY,11365 | |
werkzeug/security.py,sha256=0m107exslz4QJLWQCpfQJ04z3re4eGHVggRvrQVAdWc,9193 | |
werkzeug/serving.py,sha256=A0flnIJHufdn2QJ9oeuHfrXwP3LzP8fn3rNW6hbxKUg,31926 | |
werkzeug/test.py,sha256=XmECSmnpASiYQTct4oMiWr0LT5jHWCtKqnpYKZd2ui8,36100 | |
werkzeug/testapp.py,sha256=3HQRW1sHZKXuAjCvFMet4KXtQG3loYTFnvn6LWt-4zI,9396 | |
werkzeug/urls.py,sha256=dUeLg2IeTm0WLmSvFeD4hBZWGdOs-uHudR5-t8n9zPo,36771 | |
werkzeug/useragents.py,sha256=BhYMf4cBTHyN4U0WsQedePIocmNlH_34C-UwqSThGCc,5865 | |
werkzeug/utils.py,sha256=BrY1j0DHQ8RTb0K1StIobKuMJhN9SQQkWEARbrh2qpk,22972 | |
werkzeug/websocket.py,sha256=PpSeDxXD_0UsPAa5hQhQNM6mxibeUgn8lA8eRqiS0vM,11344 | |
werkzeug/wrappers.py,sha256=kbyL_aFjxELwPgMwfNCYjKu-CR6kNkh-oO8wv3GXbk8,84511 | |
werkzeug/wsgi.py,sha256=1Nob-aeChWQf7MsiicO8RZt6J90iRzEcik44ev9Qu8s,49347 | |
werkzeug/contrib/__init__.py,sha256=f7PfttZhbrImqpr5Ezre8CXgwvcGUJK7zWNpO34WWrw,623 | |
werkzeug/contrib/atom.py,sha256=qqfJcfIn2RYY-3hO3Oz0aLq9YuNubcPQ_KZcNsDwVJo,15575 | |
werkzeug/contrib/cache.py,sha256=xBImHNj09BmX_7kC5NUCx8f_l4L8_O7zi0jCL21UZKE,32163 | |
werkzeug/contrib/fixers.py,sha256=gR06T-w71ur-tHQ_31kP_4jpOncPJ4Wc1dOqTvYusr8,10179 | |
werkzeug/contrib/iterio.py,sha256=RlqDvGhz0RneTpzE8dVc-yWCUv4nkPl1jEc_EDp2fH0,10814 | |
werkzeug/contrib/jsrouting.py,sha256=QTmgeDoKXvNK02KzXgx9lr3cAH6fAzpwF5bBdPNvJPs,8564 | |
werkzeug/contrib/limiter.py,sha256=iS8-ahPZ-JLRnmfIBzxpm7O_s3lPsiDMVWv7llAIDCI,1334 | |
werkzeug/contrib/lint.py,sha256=Mj9NeUN7s4zIUWeQOAVjrmtZIcl3Mm2yDe9BSIr9YGE,12558 | |
werkzeug/contrib/profiler.py,sha256=ISwCWvwVyGpDLRBRpLjo_qUWma6GXYBrTAco4PEQSHY,5151 | |
werkzeug/contrib/securecookie.py,sha256=uWMyHDHY3lkeBRiCSayGqWkAIy4a7xAbSE_Hln9ecqc,12196 | |
werkzeug/contrib/sessions.py,sha256=39LVNvLbm5JWpbxM79WC2l87MJFbqeISARjwYbkJatw,12577 | |
werkzeug/contrib/testtools.py,sha256=G9xN-qeihJlhExrIZMCahvQOIDxdL9NiX874jiiHFMs,2453 | |
werkzeug/contrib/wrappers.py,sha256=v7OYlz7wQtDlS9fey75UiRZ1IkUWqCpzbhsLy4k14Hw,10398 | |
werkzeug/debug/__init__.py,sha256=uSn9BqCZ5E3ySgpoZtundpROGsn-uYvZtSFiTfAX24M,17452 | |
werkzeug/debug/console.py,sha256=n3-dsKk1TsjnN-u4ZgmuWCU_HO0qw5IA7ttjhyyMM6I,5607 | |
werkzeug/debug/repr.py,sha256=bKqstDYGfECpeLerd48s_hxuqK4b6UWnjMu3d_DHO8I,9340 | |
werkzeug/debug/tbtools.py,sha256=rBudXCmkVdAKIcdhxANxgf09g6kQjJWW9_5bjSpr4OY,18451 | |
werkzeug/debug/shared/FONT_LICENSE,sha256=LwAVEI1oYnvXiNMT9SnCH_TaLCxCpeHziDrMg0gPkAI,4673 | |
werkzeug/debug/shared/console.png,sha256=bxax6RXXlvOij_KeqvSNX0ojJf83YbnZ7my-3Gx9w2A,507 | |
werkzeug/debug/shared/debugger.js,sha256=PKPVYuyO4SX1hkqLOwCLvmIEO5154WatFYaXE-zIfKI,6264 | |
werkzeug/debug/shared/jquery.js,sha256=7LkWEzqTdpEfELxcZZlS6wAx5Ff13zZ83lYO2_ujj7g,95957 | |
werkzeug/debug/shared/less.png,sha256=-4-kNRaXJSONVLahrQKUxMwXGm9R4OnZ9SxDGpHlIR4,191 | |
werkzeug/debug/shared/more.png,sha256=GngN7CioHQoV58rH6ojnkYi8c_qED2Aka5FO5UXrReY,200 | |
werkzeug/debug/shared/source.png,sha256=RoGcBTE4CyCB85GBuDGTFlAnUqxwTBiIfDqW15EpnUQ,818 | |
werkzeug/debug/shared/style.css,sha256=IEO0PC2pWmh2aEyGCaN--txuWsRCliuhlbEhPDFwh0A,6270 | |
werkzeug/debug/shared/ubuntu.ttf,sha256=1eaHFyepmy4FyDvjLVzpITrGEBu_CZYY94jE0nED1c0,70220 | |
Werkzeug-0.14.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 | |
werkzeug/contrib/__pycache__/atom.cpython-37.pyc,, | |
werkzeug/contrib/__pycache__/cache.cpython-37.pyc,, | |
werkzeug/contrib/__pycache__/fixers.cpython-37.pyc,, | |
werkzeug/contrib/__pycache__/iterio.cpython-37.pyc,, | |
werkzeug/contrib/__pycache__/jsrouting.cpython-37.pyc,, | |
werkzeug/contrib/__pycache__/limiter.cpython-37.pyc,, | |
werkzeug/contrib/__pycache__/lint.cpython-37.pyc,, | |
werkzeug/contrib/__pycache__/profiler.cpython-37.pyc,, | |
werkzeug/contrib/__pycache__/securecookie.cpython-37.pyc,, | |
werkzeug/contrib/__pycache__/sessions.cpython-37.pyc,, | |
werkzeug/contrib/__pycache__/testtools.cpython-37.pyc,, | |
werkzeug/contrib/__pycache__/wrappers.cpython-37.pyc,, | |
werkzeug/contrib/__pycache__/__init__.cpython-37.pyc,, | |
werkzeug/debug/__pycache__/console.cpython-37.pyc,, | |
werkzeug/debug/__pycache__/repr.cpython-37.pyc,, | |
werkzeug/debug/__pycache__/tbtools.cpython-37.pyc,, | |
werkzeug/debug/__pycache__/__init__.cpython-37.pyc,, | |
werkzeug/__pycache__/datastructures.cpython-37.pyc,, | |
werkzeug/__pycache__/exceptions.cpython-37.pyc,, | |
werkzeug/__pycache__/filesystem.cpython-37.pyc,, | |
werkzeug/__pycache__/formparser.cpython-37.pyc,, | |
werkzeug/__pycache__/http.cpython-37.pyc,, | |
werkzeug/__pycache__/local.cpython-37.pyc,, | |
werkzeug/__pycache__/posixemulation.cpython-37.pyc,, | |
werkzeug/__pycache__/routing.cpython-37.pyc,, | |
werkzeug/__pycache__/script.cpython-37.pyc,, | |
werkzeug/__pycache__/security.cpython-37.pyc,, | |
werkzeug/__pycache__/serving.cpython-37.pyc,, | |
werkzeug/__pycache__/test.cpython-37.pyc,, | |
werkzeug/__pycache__/testapp.cpython-37.pyc,, | |
werkzeug/__pycache__/urls.cpython-37.pyc,, | |
werkzeug/__pycache__/useragents.cpython-37.pyc,, | |
werkzeug/__pycache__/utils.cpython-37.pyc,, | |
werkzeug/__pycache__/websocket.cpython-37.pyc,, | |
werkzeug/__pycache__/wrappers.cpython-37.pyc,, | |
werkzeug/__pycache__/wsgi.cpython-37.pyc,, | |
werkzeug/__pycache__/_compat.cpython-37.pyc,, | |
werkzeug/__pycache__/_internal.cpython-37.pyc,, | |
werkzeug/__pycache__/_reloader.cpython-37.pyc,, | |
werkzeug/__pycache__/__init__.cpython-37.pyc,, |
werkzeug |
Wheel-Version: 1.0 | |
Generator: bdist_wheel (0.26.0) | |
Root-Is-Purelib: true | |
Tag: py2-none-any | |
Tag: py3-none-any | |
# -*- coding: utf-8 -*- | |
""" | |
jinja2.nodes | |
~~~~~~~~~~~~ | |
This module implements additional nodes derived from the ast base node. | |
It also provides some node tree helper functions like `in_lineno` and | |
`get_nodes` used by the parser and translator in order to normalize | |
python and jinja nodes. | |
:copyright: (c) 2017 by the Jinja Team. | |
:license: BSD, see LICENSE for more details. | |
""" | |
import types | |
import operator | |
from collections import deque | |
from jinja2.utils import Markup | |
from jinja2._compat import izip, with_metaclass, text_type, PY2 | |
#: the types we support for context functions
_context_function_types = (types.FunctionType, types.MethodType)

#: binary operator symbol -> callable used for compile-time constant folding
_binop_to_func = {
    '*': operator.mul,
    '/': operator.truediv,
    '//': operator.floordiv,
    '**': operator.pow,
    '%': operator.mod,
    '+': operator.add,
    '-': operator.sub
}

#: unary operator symbol -> callable used for compile-time constant folding
_uaop_to_func = {
    'not': operator.not_,
    '+': operator.pos,
    '-': operator.neg
}

#: comparison operator name -> callable used for compile-time constant folding
_cmpop_to_func = {
    'eq': operator.eq,
    'ne': operator.ne,
    'gt': operator.gt,
    'gteq': operator.ge,
    'lt': operator.lt,
    'lteq': operator.le,
    'in': lambda a, b: a in b,
    'notin': lambda a, b: a not in b
}
class Impossible(Exception):
    """Raised if the node could not perform a requested action.

    Used by the ``as_const`` implementations below to signal that an
    expression cannot be folded to a constant at compile time.
    """
class NodeType(type):
    """A metaclass for nodes that handles the field and attribute
    inheritance.  fields and attributes from the parent class are
    automatically forwarded to the child."""

    def __new__(cls, name, bases, d):
        for attr in 'fields', 'attributes':
            # parent entries first, then the subclass's own additions
            storage = []
            storage.extend(getattr(bases[0], attr, ()))
            storage.extend(d.get(attr, ()))
            # single inheritance keeps the field layout unambiguous
            assert len(bases) == 1, 'multiple inheritance not allowed'
            # a name may not be declared twice along the hierarchy
            assert len(storage) == len(set(storage)), 'layout conflict'
            d[attr] = tuple(storage)
        # nodes are concrete unless they explicitly opt out
        d.setdefault('abstract', False)
        return type.__new__(cls, name, bases, d)
class EvalContext(object):
    """Holds evaluation time information.  Custom attributes can be attached
    to it in extensions.
    """

    def __init__(self, environment, template_name=None):
        self.environment = environment
        # The environment's autoescape setting may be a plain flag or a
        # callable that decides per template name.
        autoescape = environment.autoescape
        if callable(autoescape):
            autoescape = autoescape(template_name)
        self.autoescape = autoescape
        self.volatile = False

    def save(self):
        """Snapshot the current state as a plain dict."""
        return dict(self.__dict__)

    def revert(self, old):
        """Restore a state previously captured by :meth:`save`."""
        self.__dict__.clear()
        self.__dict__.update(old)
def get_eval_context(node, ctx):
    """Return ``ctx`` unchanged if one was given, otherwise build a
    default :class:`EvalContext` from the node's attached environment.
    """
    if ctx is not None:
        return ctx
    if node.environment is None:
        raise RuntimeError('if no eval context is passed, the '
                           'node must have an attached '
                           'environment.')
    return EvalContext(node.environment)
class Node(with_metaclass(NodeType, object)):
    """Baseclass for all Jinja2 nodes.  There are a number of nodes available
    of different types.  There are four major types:

    -   :class:`Stmt`: statements
    -   :class:`Expr`: expressions
    -   :class:`Helper`: helper nodes
    -   :class:`Template`: the outermost wrapper node

    All nodes have fields and attributes.  Fields may be other nodes, lists,
    or arbitrary values.  Fields are passed to the constructor as regular
    positional arguments, attributes as keyword arguments.  Each node has
    two attributes: `lineno` (the line number of the node) and `environment`.
    The `environment` attribute is set at the end of the parsing process for
    all nodes automatically.
    """
    fields = ()
    attributes = ('lineno', 'environment')
    abstract = True

    def __init__(self, *fields, **attributes):
        if self.abstract:
            raise TypeError('abstract nodes are not instanciable')
        if fields:
            # either all fields are given positionally or none at all
            if len(fields) != len(self.fields):
                if not self.fields:
                    raise TypeError('%r takes 0 arguments' %
                                    self.__class__.__name__)
                raise TypeError('%r takes 0 or %d argument%s' % (
                    self.__class__.__name__,
                    len(self.fields),
                    len(self.fields) != 1 and 's' or ''
                ))
            for name, arg in izip(self.fields, fields):
                setattr(self, name, arg)
        # declared attributes default to None when not supplied
        for attr in self.attributes:
            setattr(self, attr, attributes.pop(attr, None))
        if attributes:
            raise TypeError('unknown attribute %r' %
                            next(iter(attributes)))

    def iter_fields(self, exclude=None, only=None):
        """This method iterates over all fields that are defined and yields
        ``(key, value)`` tuples.  Per default all fields are returned, but
        it's possible to limit that to some fields by providing the `only`
        parameter or to exclude some using the `exclude` parameter.  Both
        should be sets or tuples of field names.
        """
        for name in self.fields:
            if (exclude is only is None) or \
               (exclude is not None and name not in exclude) or \
               (only is not None and name in only):
                try:
                    yield name, getattr(self, name)
                except AttributeError:
                    # a field may legitimately be unset; skip it
                    pass

    def iter_child_nodes(self, exclude=None, only=None):
        """Iterates over all direct child nodes of the node.  This iterates
        over all fields and yields the values if they are nodes.  If the
        value of a field is a list, all the nodes in that list are returned.
        """
        for field, item in self.iter_fields(exclude, only):
            if isinstance(item, list):
                for n in item:
                    if isinstance(n, Node):
                        yield n
            elif isinstance(item, Node):
                yield item

    def find(self, node_type):
        """Find the first node of a given type.  If no such node exists the
        return value is `None`.
        """
        for result in self.find_all(node_type):
            return result

    def find_all(self, node_type):
        """Find all the nodes of a given type.  If the type is a tuple,
        the check is performed for any of the tuple items.
        """
        for child in self.iter_child_nodes():
            if isinstance(child, node_type):
                yield child
            for result in child.find_all(node_type):
                yield result

    def set_ctx(self, ctx):
        """Reset the context of a node and all child nodes.  By default the
        parser generates nodes that have a 'load' context as it's the
        most common one.  This method is used in the parser to set assignment
        targets and other nodes to a store context.
        """
        # breadth-first walk over the whole subtree
        todo = deque([self])
        while todo:
            node = todo.popleft()
            if 'ctx' in node.fields:
                node.ctx = ctx
            todo.extend(node.iter_child_nodes())
        return self

    def set_lineno(self, lineno, override=False):
        """Set the line numbers of the node and children."""
        todo = deque([self])
        while todo:
            node = todo.popleft()
            if 'lineno' in node.attributes:
                # only fill in missing line numbers unless forced
                if node.lineno is None or override:
                    node.lineno = lineno
            todo.extend(node.iter_child_nodes())
        return self

    def set_environment(self, environment):
        """Set the environment for all nodes."""
        todo = deque([self])
        while todo:
            node = todo.popleft()
            node.environment = environment
            todo.extend(node.iter_child_nodes())
        return self

    def __eq__(self, other):
        # nodes are equal when type and all field values match
        return type(self) is type(other) and \
               tuple(self.iter_fields()) == tuple(other.iter_fields())

    def __ne__(self, other):
        return not self.__eq__(other)

    # Restore Python 2 hashing behavior on Python 3
    __hash__ = object.__hash__

    def __repr__(self):
        return '%s(%s)' % (
            self.__class__.__name__,
            ', '.join('%s=%r' % (arg, getattr(self, arg, None)) for
                      arg in self.fields)
        )

    def dump(self):
        """Return a string representation of the whole node tree in
        ``nodes.Name(...)`` constructor form.
        """
        def _dump(node):
            if not isinstance(node, Node):
                buf.append(repr(node))
                return

            buf.append('nodes.%s(' % node.__class__.__name__)
            if not node.fields:
                buf.append(')')
                return
            for idx, field in enumerate(node.fields):
                if idx:
                    buf.append(', ')
                value = getattr(node, field)
                if isinstance(value, list):
                    buf.append('[')
                    for idx, item in enumerate(value):
                        if idx:
                            buf.append(', ')
                        _dump(item)
                    buf.append(']')
                else:
                    _dump(value)
            buf.append(')')
        buf = []
        _dump(self)
        return ''.join(buf)
class Stmt(Node):
    """Base node for all statements."""
    abstract = True
class Helper(Node):
    """Nodes that exist in a specific context only."""
    abstract = True
class Template(Node):
    """Node that represents a template.  This must be the outermost node that
    is passed to the compiler.
    """
    # body: list of top-level child nodes
    fields = ('body',)
class Output(Stmt):
    """A node that holds multiple expressions which are then printed out.
    This is used both for the `print` statement and the regular template data.
    """
    # nodes: list of expressions to write to the output
    fields = ('nodes',)
class Extends(Stmt):
    """Represents an extends statement."""
    # template: expression naming the parent template
    fields = ('template',)
class For(Stmt):
    """The for loop.  `target` is the target for the iteration (usually a
    :class:`Name` or :class:`Tuple`), `iter` the iterable.  `body` is a list
    of nodes that are used as loop-body, and `else_` a list of nodes for the
    `else` block.  If no else node exists it has to be an empty list.

    For filtered nodes an expression can be stored as `test`, otherwise `None`.
    """
    fields = ('target', 'iter', 'body', 'else_', 'test', 'recursive')
class If(Stmt):
    """If `test` is true, `body` is rendered, else `else_`."""
    # elif_: list of chained If nodes; else_: list of fallback body nodes
    fields = ('test', 'body', 'elif_', 'else_')
class Macro(Stmt):
    """A macro definition.  `name` is the name of the macro, `args` a list of
    arguments and `defaults` a list of defaults if there are any.  `body` is
    a list of nodes for the macro body.
    """
    fields = ('name', 'args', 'defaults', 'body')
class CallBlock(Stmt):
    """Like a macro without a name but a call instead.  `call` is called with
    the unnamed macro as `caller` argument this node holds.
    """
    fields = ('call', 'args', 'defaults', 'body')
class FilterBlock(Stmt):
    """Node for filter sections."""
    # body: nodes whose output is piped through `filter`
    fields = ('body', 'filter')
class With(Stmt):
    """Specific node for with statements.  In older versions of Jinja the
    with statement was implemented on the base of the `Scope` node instead.

    .. versionadded:: 2.9.3
    """
    # targets and values are parallel lists of assignment pairs
    fields = ('targets', 'values', 'body')
class Block(Stmt):
    """A node that represents a block."""
    # scoped: whether the block gets access to the surrounding scope
    fields = ('name', 'body', 'scoped')
class Include(Stmt):
    """A node that represents the include tag."""
    fields = ('template', 'with_context', 'ignore_missing')
class Import(Stmt):
    """A node that represents the import tag."""
    # target: name the imported template's module is bound to
    fields = ('template', 'target', 'with_context')
class FromImport(Stmt):
    """A node that represents the from import tag.  It's important to not
    pass unsafe names to the name attribute.  The compiler translates the
    attribute lookups directly into getattr calls and does *not* use the
    subscript callback of the interface.  As exported variables may not
    start with double underscores (which the parser asserts) this is not a
    problem for regular Jinja code, but if this node is used in an extension
    extra care must be taken.

    The list of names may contain tuples if aliases are wanted.
    """
    fields = ('template', 'names', 'with_context')
class ExprStmt(Stmt):
    """A statement that evaluates an expression and discards the result."""
    fields = ('node',)
class Assign(Stmt):
    """Assigns an expression to a target."""
    fields = ('target', 'node')
class AssignBlock(Stmt):
    """Assigns a block to a target."""
    # filter: optional filter applied to the captured block output
    fields = ('target', 'filter', 'body')
class Expr(Node):
    """Baseclass for all expressions."""
    abstract = True

    def as_const(self, eval_ctx=None):
        """Return the value of the expression as constant or raise
        :exc:`Impossible` if this was not possible.

        An :class:`EvalContext` can be provided, if none is given
        a default context is created which requires the nodes to have
        an attached environment.

        .. versionchanged:: 2.4
           the `eval_ctx` parameter was added.
        """
        raise Impossible()

    def can_assign(self):
        """Check if it's possible to assign something to this node."""
        return False
class BinExpr(Expr):
    """Baseclass for all binary expressions."""
    fields = ('left', 'right')
    # operator: the symbol key into _binop_to_func, set by subclasses
    operator = None
    abstract = True

    def as_const(self, eval_ctx=None):
        eval_ctx = get_eval_context(self, eval_ctx)
        # intercepted operators cannot be folded at compile time
        if self.environment.sandboxed and \
           self.operator in self.environment.intercepted_binops:
            raise Impossible()
        f = _binop_to_func[self.operator]
        try:
            return f(self.left.as_const(eval_ctx), self.right.as_const(eval_ctx))
        except Exception:
            # any evaluation failure means the fold must happen at runtime
            raise Impossible()
class UnaryExpr(Expr):
    """Baseclass for all unary expressions."""
    fields = ('node',)
    # operator: the symbol key into _uaop_to_func, set by subclasses
    operator = None
    abstract = True

    def as_const(self, eval_ctx=None):
        eval_ctx = get_eval_context(self, eval_ctx)
        # intercepted operators cannot be folded at compile time
        if self.environment.sandboxed and \
           self.operator in self.environment.intercepted_unops:
            raise Impossible()
        f = _uaop_to_func[self.operator]
        try:
            return f(self.node.as_const(eval_ctx))
        except Exception:
            # any evaluation failure means the fold must happen at runtime
            raise Impossible()
class Name(Expr):
    """Looks up a name or stores a value in a name.
    The `ctx` of the node can be one of the following values:

    -   `store`: store a value in the name
    -   `load`: load that name
    -   `param`: like `store` but if the name was defined as function parameter.
    """
    fields = ('name', 'ctx')

    def can_assign(self):
        """Reserved constant names may never be assignment targets."""
        reserved = ('true', 'false', 'none', 'True', 'False', 'None')
        return self.name not in reserved
class NSRef(Expr):
    """Reference to a namespace value assignment"""
    fields = ('name', 'attr')

    def can_assign(self):
        # We don't need any special checks here; NSRef assignments have a
        # runtime check to ensure the target is a namespace object which will
        # have been checked already as it is created using a normal assignment
        # which goes through a `Name` node.
        return True
class Literal(Expr):
    """Baseclass for literals."""
    abstract = True
class Const(Literal):
    """All constant values.  The parser will return this node for simple
    constants such as ``42`` or ``"foo"`` but it can be used to store more
    complex values such as lists too.  Only constants with a safe
    representation (objects where ``eval(repr(x)) == x`` is true).
    """
    fields = ('value',)

    def as_const(self, eval_ctx=None):
        rv = self.value
        # On Python 2, optionally coerce ASCII-only unicode constants to
        # native str when the 'compiler.ascii_str' policy is enabled.
        if PY2 and type(rv) is text_type and \
                self.environment.policies['compiler.ascii_str']:
            try:
                rv = rv.encode('ascii')
            except UnicodeError:
                pass
        return rv

    @classmethod
    def from_untrusted(cls, value, lineno=None, environment=None):
        """Return a const object if the value is representable as
        constant value in the generated code, otherwise it will raise
        an `Impossible` exception.
        """
        from .compiler import has_safe_repr
        if not has_safe_repr(value):
            raise Impossible()
        return cls(value, lineno=lineno, environment=environment)
class TemplateData(Literal):
    """A constant template string."""
    fields = ('data',)

    def as_const(self, eval_ctx=None):
        eval_ctx = get_eval_context(self, eval_ctx)
        # template data depends on the render-time context when volatile
        if eval_ctx.volatile:
            raise Impossible()
        if eval_ctx.autoescape:
            return Markup(self.data)
        return self.data
class Tuple(Literal):
    """For loop unpacking and some other things like multiple arguments
    for subscripts.  Like for :class:`Name` `ctx` specifies if the tuple
    is used for loading the names or storing.
    """
    fields = ('items', 'ctx')

    def as_const(self, eval_ctx=None):
        """Fold every item to a constant and return them as a tuple."""
        eval_ctx = get_eval_context(self, eval_ctx)
        folded = [item.as_const(eval_ctx) for item in self.items]
        return tuple(folded)

    def can_assign(self):
        """A tuple is assignable iff every item is assignable."""
        return all(item.can_assign() for item in self.items)
class List(Literal):
    """Any list literal such as ``[1, 2, 3]``"""
    fields = ('items',)

    def as_const(self, eval_ctx=None):
        """Fold every item to a constant and return them as a list."""
        eval_ctx = get_eval_context(self, eval_ctx)
        return list(item.as_const(eval_ctx) for item in self.items)
class Dict(Literal):
    """Any dict literal such as ``{1: 2, 3: 4}``.  The items must be a list of
    :class:`Pair` nodes.
    """
    fields = ('items',)

    def as_const(self, eval_ctx=None):
        eval_ctx = get_eval_context(self, eval_ctx)
        # each Pair folds to a (key, value) tuple
        return dict(x.as_const(eval_ctx) for x in self.items)
class Pair(Helper):
    """A key, value pair for dicts."""
    fields = ('key', 'value')

    def as_const(self, eval_ctx=None):
        eval_ctx = get_eval_context(self, eval_ctx)
        return self.key.as_const(eval_ctx), self.value.as_const(eval_ctx)
class Keyword(Helper):
    """A key, value pair for keyword arguments where key is a string."""
    fields = ('key', 'value')

    def as_const(self, eval_ctx=None):
        eval_ctx = get_eval_context(self, eval_ctx)
        # the key is a plain string, only the value is folded
        return self.key, self.value.as_const(eval_ctx)
class CondExpr(Expr):
    """A conditional expression (inline if expression).  (``{{
    foo if bar else baz }}``)
    """
    fields = ('test', 'expr1', 'expr2')

    def as_const(self, eval_ctx=None):
        """Fold the branch selected by the constant test."""
        eval_ctx = get_eval_context(self, eval_ctx)
        if self.test.as_const(eval_ctx):
            return self.expr1.as_const(eval_ctx)
        if self.expr2 is not None:
            return self.expr2.as_const(eval_ctx)
        # no else branch: the result would be an undefined object, which
        # is better produced at runtime
        raise Impossible()
def args_as_const(node, eval_ctx):
    """Constant-fold the argument lists of a call-like node (one having
    `args`, `kwargs`, `dyn_args` and `dyn_kwargs` fields) and return the
    resulting ``(args, kwargs)`` pair.  Raises :exc:`Impossible` if the
    dynamic argument expressions cannot be expanded.
    """
    args = [x.as_const(eval_ctx) for x in node.args]
    kwargs = dict(x.as_const(eval_ctx) for x in node.kwargs)

    # expand ``*args`` if present
    if node.dyn_args is not None:
        try:
            args.extend(node.dyn_args.as_const(eval_ctx))
        except Exception:
            raise Impossible()

    # expand ``**kwargs`` if present
    if node.dyn_kwargs is not None:
        try:
            kwargs.update(node.dyn_kwargs.as_const(eval_ctx))
        except Exception:
            raise Impossible()

    return args, kwargs
class Filter(Expr):
    """This node applies a filter on an expression.  `name` is the name of
    the filter, the rest of the fields are the same as for :class:`Call`.

    If the `node` of a filter is `None` the contents of the last buffer are
    filtered.  Buffers are created by macros and filter blocks.
    """
    fields = ('node', 'name', 'args', 'kwargs', 'dyn_args', 'dyn_kwargs')

    def as_const(self, eval_ctx=None):
        eval_ctx = get_eval_context(self, eval_ctx)
        # a volatile context or a buffer filter (node is None) cannot be
        # folded at compile time
        if eval_ctx.volatile or self.node is None:
            raise Impossible()
        # we have to be careful here because we call filter_ below.
        # if this variable would be called filter, 2to3 would wrap the
        # call in a list because it is assuming we are talking about the
        # builtin filter function here which no longer returns a list in
        # python 3.  because of that, do not rename filter_ to filter!
        filter_ = self.environment.filters.get(self.name)
        # unknown filters and context filters need runtime information
        if filter_ is None or getattr(filter_, 'contextfilter', False):
            raise Impossible()
        # We cannot constant-fold async filters, so we need to make sure
        # to not go down this path.
        if (
            eval_ctx.environment.is_async
            and getattr(filter_, 'asyncfiltervariant', False)
        ):
            raise Impossible()
        args, kwargs = args_as_const(self, eval_ctx)
        args.insert(0, self.node.as_const(eval_ctx))
        # eval-context and environment filters receive an extra first arg
        if getattr(filter_, 'evalcontextfilter', False):
            args.insert(0, eval_ctx)
        elif getattr(filter_, 'environmentfilter', False):
            args.insert(0, self.environment)
        try:
            return filter_(*args, **kwargs)
        except Exception:
            raise Impossible()
class Test(Expr):
    """Applies a test on an expression.  `name` is the name of the test, the
    rest of the fields are the same as for :class:`Call`.
    """
    fields = ('node', 'name', 'args', 'kwargs', 'dyn_args', 'dyn_kwargs')

    def as_const(self, eval_ctx=None):
        # an unknown test cannot be folded
        test = self.environment.tests.get(self.name)
        if test is None:
            raise Impossible()
        eval_ctx = get_eval_context(self, eval_ctx)
        args, kwargs = args_as_const(self, eval_ctx)
        # the tested expression is always the first positional argument
        args.insert(0, self.node.as_const(eval_ctx))
        try:
            return test(*args, **kwargs)
        except Exception:
            raise Impossible()
class Call(Expr):
    """Calls an expression.  `args` is a list of arguments, `kwargs` a list
    of keyword arguments (list of :class:`Keyword` nodes), and `dyn_args`
    and `dyn_kwargs` has to be either `None` or a node that is used as
    node for dynamic positional (``*args``) or keyword (``**kwargs``)
    arguments.
    """
    fields = ('node', 'args', 'kwargs', 'dyn_args', 'dyn_kwargs')
class Getitem(Expr):
    """Get an attribute or item from an expression and prefer the item."""
    fields = ('node', 'arg', 'ctx')

    def as_const(self, eval_ctx=None):
        eval_ctx = get_eval_context(self, eval_ctx)
        # only load contexts can be constant folded
        if self.ctx != 'load':
            raise Impossible()
        try:
            return self.environment.getitem(self.node.as_const(eval_ctx),
                                            self.arg.as_const(eval_ctx))
        except Exception:
            raise Impossible()

    def can_assign(self):
        return False
class Getattr(Expr):
    """Get an attribute or item from an expression that is a ascii-only
    bytestring and prefer the attribute.
    """
    fields = ('node', 'attr', 'ctx')

    def as_const(self, eval_ctx=None):
        # only load contexts can be constant folded
        if self.ctx != 'load':
            raise Impossible()
        try:
            eval_ctx = get_eval_context(self, eval_ctx)
            return self.environment.getattr(self.node.as_const(eval_ctx),
                                            self.attr)
        except Exception:
            raise Impossible()

    def can_assign(self):
        return False
class Slice(Expr): | |
"""Represents a slice object. This must only be used as argument for | |
:class:`Subscript`. | |
""" | |
field |
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)