diff --git a/flask/__pycache__/helloworld.cpython-36.pyc b/flask/__pycache__/helloworld.cpython-36.pyc
new file mode 100644
index 0000000..76ae0c2
Binary files /dev/null and b/flask/__pycache__/helloworld.cpython-36.pyc differ
diff --git a/flask/helloworld.py b/flask/helloworld.py
new file mode 100644
index 0000000..34741de
--- /dev/null
+++ b/flask/helloworld.py
@@ -0,0 +1,5 @@
+from flask import Flask
+application = Flask(__name__)
+@application.route('/')
+def helloworld():
+    return 'Hello there, world!'
diff --git a/flask/install-dependensies.sh b/flask/install-dependensies.sh
new file mode 100644
index 0000000..e69de29
diff --git a/flask/venv/bin/activate b/flask/venv/bin/activate
new file mode 100644
index 0000000..65b2465
--- /dev/null
+++ b/flask/venv/bin/activate
@@ -0,0 +1,76 @@
+# This file must be used with "source bin/activate" *from bash*
+# you cannot run it directly
+
+deactivate () {
+    # reset old environment variables
+    if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
+        PATH="${_OLD_VIRTUAL_PATH:-}"
+        export PATH
+        unset _OLD_VIRTUAL_PATH
+    fi
+    if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
+        PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
+        export PYTHONHOME
+        unset _OLD_VIRTUAL_PYTHONHOME
+    fi
+
+    # This should detect bash and zsh, which have a hash command that must
+    # be called to get it to forget past commands. Without forgetting
+    # past commands the $PATH changes we made may not be respected
+    if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
+        hash -r
+    fi
+
+    if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
+        PS1="${_OLD_VIRTUAL_PS1:-}"
+        export PS1
+        unset _OLD_VIRTUAL_PS1
+    fi
+
+    unset VIRTUAL_ENV
+    if [ ! "$1" = "nondestructive" ] ; then
+    # Self destruct!
+        unset -f deactivate
+    fi
+}
+
+# unset irrelevant variables
+deactivate nondestructive
+
+VIRTUAL_ENV="/root/projekti/TeraHz/flask/venv"
+export VIRTUAL_ENV
+
+_OLD_VIRTUAL_PATH="$PATH"
+PATH="$VIRTUAL_ENV/bin:$PATH"
+export PATH
+
+# unset PYTHONHOME if set
+# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
+# could use `if (set -u; : $PYTHONHOME) ;` in bash
+if [ -n "${PYTHONHOME:-}" ] ; then
+    _OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
+    unset PYTHONHOME
+fi
+
+if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
+    _OLD_VIRTUAL_PS1="${PS1:-}"
+    if [ "x(venv) " != x ] ; then
+        PS1="(venv) ${PS1:-}"
+    else
+    if [ "`basename \"$VIRTUAL_ENV\"`" = "__" ] ; then
+        # special case for Aspen magic directories
+        # see http://www.zetadev.com/software/aspen/
+        PS1="[`basename \`dirname \"$VIRTUAL_ENV\"\``] $PS1"
+    else
+        PS1="(`basename \"$VIRTUAL_ENV\"`)$PS1"
+    fi
+    fi
+    export PS1
+fi
+
+# This should detect bash and zsh, which have a hash command that must
+# be called to get it to forget past commands. Without forgetting
+# past commands the $PATH changes we made may not be respected
+if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
+    hash -r
+fi
diff --git a/flask/venv/bin/activate.csh b/flask/venv/bin/activate.csh
new file mode 100644
index 0000000..8f97ac4
--- /dev/null
+++ b/flask/venv/bin/activate.csh
@@ -0,0 +1,37 @@
+# This file must be used with "source bin/activate.csh" *from csh*.
+# You cannot run it directly.
+# Created by Davide Di Blasi .
+# Ported to Python 3.3 venv by Andrew Svetlov + +alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate' + +# Unset irrelevant variables. +deactivate nondestructive + +setenv VIRTUAL_ENV "/root/projekti/TeraHz/flask/venv" + +set _OLD_VIRTUAL_PATH="$PATH" +setenv PATH "$VIRTUAL_ENV/bin:$PATH" + + +set _OLD_VIRTUAL_PROMPT="$prompt" + +if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then + if ("venv" != "") then + set env_name = "venv" + else + if (`basename "VIRTUAL_ENV"` == "__") then + # special case for Aspen magic directories + # see http://www.zetadev.com/software/aspen/ + set env_name = `basename \`dirname "$VIRTUAL_ENV"\`` + else + set env_name = `basename "$VIRTUAL_ENV"` + endif + endif + set prompt = "[$env_name] $prompt" + unset env_name +endif + +alias pydoc python -m pydoc + +rehash diff --git a/flask/venv/bin/activate.fish b/flask/venv/bin/activate.fish new file mode 100644 index 0000000..45f6f50 --- /dev/null +++ b/flask/venv/bin/activate.fish @@ -0,0 +1,75 @@ +# This file must be used with ". bin/activate.fish" *from fish* (http://fishshell.org) +# you cannot run it directly + +function deactivate -d "Exit virtualenv and return to normal shell environment" + # reset old environment variables + if test -n "$_OLD_VIRTUAL_PATH" + set -gx PATH $_OLD_VIRTUAL_PATH + set -e _OLD_VIRTUAL_PATH + end + if test -n "$_OLD_VIRTUAL_PYTHONHOME" + set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME + set -e _OLD_VIRTUAL_PYTHONHOME + end + + if test -n "$_OLD_FISH_PROMPT_OVERRIDE" + functions -e fish_prompt + set -e _OLD_FISH_PROMPT_OVERRIDE + functions -c _old_fish_prompt fish_prompt + functions -e _old_fish_prompt + end + + set -e VIRTUAL_ENV + if test "$argv[1]" != "nondestructive" + # Self destruct! + functions -e deactivate + end +end + +# unset irrelevant variables +deactivate nondestructive + +set -gx VIRTUAL_ENV "/root/projekti/TeraHz/flask/venv" + +set -gx _OLD_VIRTUAL_PATH $PATH +set -gx PATH "$VIRTUAL_ENV/bin" $PATH + +# unset PYTHONHOME if set +if set -q PYTHONHOME + set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME + set -e PYTHONHOME +end + +if test -z "$VIRTUAL_ENV_DISABLE_PROMPT" + # fish uses a function instead of an env var to generate the prompt. + + # save the current fish_prompt function as the function _old_fish_prompt + functions -c fish_prompt _old_fish_prompt + + # with the original prompt function renamed, we can override with our own. + function fish_prompt + # Save the return status of the last command + set -l old_status $status + + # Prompt override? + if test -n "(venv) " + printf "%s%s" "(venv) " (set_color normal) + else + # ...Otherwise, prepend env + set -l _checkbase (basename "$VIRTUAL_ENV") + if test $_checkbase = "__" + # special case for Aspen magic directories + # see http://www.zetadev.com/software/aspen/ + printf "%s[%s]%s " (set_color -b blue white) (basename (dirname "$VIRTUAL_ENV")) (set_color normal) + else + printf "%s(%s)%s" (set_color -b blue white) (basename "$VIRTUAL_ENV") (set_color normal) + end + end + + # Restore the return status of the previous command. + echo "exit $old_status" | . 
+ _old_fish_prompt + end + + set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV" +end diff --git a/flask/venv/bin/dotenv b/flask/venv/bin/dotenv new file mode 100755 index 0000000..3e09d5c --- /dev/null +++ b/flask/venv/bin/dotenv @@ -0,0 +1,10 @@ +#!/root/projekti/TeraHz/flask/venv/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys + +from dotenv.cli import cli + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(cli()) diff --git a/flask/venv/bin/easy_install b/flask/venv/bin/easy_install new file mode 100755 index 0000000..3e61814 --- /dev/null +++ b/flask/venv/bin/easy_install @@ -0,0 +1,10 @@ +#!/root/projekti/TeraHz/flask/venv/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys + +from setuptools.command.easy_install import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/flask/venv/bin/easy_install-3.6 b/flask/venv/bin/easy_install-3.6 new file mode 100755 index 0000000..3e61814 --- /dev/null +++ b/flask/venv/bin/easy_install-3.6 @@ -0,0 +1,10 @@ +#!/root/projekti/TeraHz/flask/venv/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys + +from setuptools.command.easy_install import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/flask/venv/bin/flask b/flask/venv/bin/flask new file mode 100755 index 0000000..511a9be --- /dev/null +++ b/flask/venv/bin/flask @@ -0,0 +1,10 @@ +#!/root/projekti/TeraHz/flask/venv/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys + +from flask.cli import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/flask/venv/bin/pip b/flask/venv/bin/pip new file mode 100755 index 0000000..1660757 --- /dev/null +++ b/flask/venv/bin/pip @@ -0,0 +1,10 @@ +#!/root/projekti/TeraHz/flask/venv/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys + +from pip._internal import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/flask/venv/bin/pip3 b/flask/venv/bin/pip3 new file mode 100755 index 0000000..1660757 --- /dev/null +++ b/flask/venv/bin/pip3 @@ -0,0 +1,10 @@ +#!/root/projekti/TeraHz/flask/venv/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys + +from pip._internal import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/flask/venv/bin/pip3.6 b/flask/venv/bin/pip3.6 new file mode 100755 index 0000000..1660757 --- /dev/null +++ b/flask/venv/bin/pip3.6 @@ -0,0 +1,10 @@ +#!/root/projekti/TeraHz/flask/venv/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys + +from pip._internal import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/flask/venv/bin/python b/flask/venv/bin/python new file mode 120000 index 0000000..b8a0adb --- /dev/null +++ b/flask/venv/bin/python @@ -0,0 +1 @@ +python3 \ No newline at end of file diff --git a/flask/venv/bin/python3 b/flask/venv/bin/python3 new file mode 120000 index 0000000..ae65fda --- /dev/null +++ b/flask/venv/bin/python3 @@ -0,0 +1 @@ +/usr/bin/python3 \ No newline at end of file diff --git a/flask/venv/bin/watchmedo b/flask/venv/bin/watchmedo new file mode 100755 index 0000000..8eddad5 --- /dev/null +++ 
b/flask/venv/bin/watchmedo @@ -0,0 +1,12 @@ +#!/root/projekti/TeraHz/flask/venv/bin/python3 +# EASY-INSTALL-ENTRY-SCRIPT: 'watchdog==0.9.0','console_scripts','watchmedo' +__requires__ = 'watchdog==0.9.0' +import re +import sys +from pkg_resources import load_entry_point + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit( + load_entry_point('watchdog==0.9.0', 'console_scripts', 'watchmedo')() + ) diff --git a/flask/venv/lib/python3.6/site-packages/Click-7.0.dist-info/INSTALLER b/flask/venv/lib/python3.6/site-packages/Click-7.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/Click-7.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/flask/venv/lib/python3.6/site-packages/Click-7.0.dist-info/LICENSE.txt b/flask/venv/lib/python3.6/site-packages/Click-7.0.dist-info/LICENSE.txt new file mode 100644 index 0000000..87ce152 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/Click-7.0.dist-info/LICENSE.txt @@ -0,0 +1,39 @@ +Copyright © 2014 by the Pallets team. + +Some rights reserved. + +Redistribution and use in source and binary forms of the software as +well as documentation, with or without modification, are permitted +provided that the following conditions are met: + +- Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +- Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +- Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE AND DOCUMENTATION IS PROVIDED BY THE COPYRIGHT HOLDERS AND +CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, +BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT +NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF +USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF +THIS SOFTWARE AND DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF +SUCH DAMAGE. + +---- + +Click uses parts of optparse written by Gregory P. Ward and maintained +by the Python Software Foundation. This is limited to code in parser.py. + +Copyright © 2001-2006 Gregory P. Ward. All rights reserved. +Copyright © 2002-2006 Python Software Foundation. All rights reserved. 
diff --git a/flask/venv/lib/python3.6/site-packages/Click-7.0.dist-info/METADATA b/flask/venv/lib/python3.6/site-packages/Click-7.0.dist-info/METADATA new file mode 100644 index 0000000..625bdad --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/Click-7.0.dist-info/METADATA @@ -0,0 +1,121 @@ +Metadata-Version: 2.1 +Name: Click +Version: 7.0 +Summary: Composable command line interface toolkit +Home-page: https://palletsprojects.com/p/click/ +Author: Armin Ronacher +Author-email: armin.ronacher@active-4.com +Maintainer: Pallets Team +Maintainer-email: contact@palletsprojects.com +License: BSD +Project-URL: Documentation, https://click.palletsprojects.com/ +Project-URL: Code, https://github.com/pallets/click +Project-URL: Issue tracker, https://github.com/pallets/click/issues +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.* + +\$ click\_ +========== + +Click is a Python package for creating beautiful command line interfaces +in a composable way with as little code as necessary. It's the "Command +Line Interface Creation Kit". It's highly configurable but comes with +sensible defaults out of the box. + +It aims to make the process of writing command line tools quick and fun +while also preventing any frustration caused by the inability to +implement an intended CLI API. + +Click in three points: + +- Arbitrary nesting of commands +- Automatic help page generation +- Supports lazy loading of subcommands at runtime + + +Installing +---------- + +Install and update using `pip`_: + +.. code-block:: text + + $ pip install click + +Click supports Python 3.4 and newer, Python 2.7, and PyPy. + +.. _pip: https://pip.pypa.io/en/stable/quickstart/ + + +A Simple Example +---------------- + +What does it look like? Here is an example of a simple Click program: + +.. code-block:: python + + import click + + @click.command() + @click.option("--count", default=1, help="Number of greetings.") + @click.option("--name", prompt="Your name", + help="The person to greet.") + def hello(count, name): + """Simple program that greets NAME for a total of COUNT times.""" + for _ in range(count): + click.echo("Hello, %s!" % name) + + if __name__ == '__main__': + hello() + +And what it looks like when run: + +.. code-block:: text + + $ python hello.py --count=3 + Your name: Click + Hello, Click! + Hello, Click! + Hello, Click! + + +Donate +------ + +The Pallets organization develops and supports Click and other popular +packages. In order to grow the community of contributors and users, and +allow the maintainers to devote more time to the projects, `please +donate today`_. + +.. 
_please donate today: https://palletsprojects.com/donate + + +Links +----- + +* Website: https://palletsprojects.com/p/click/ +* Documentation: https://click.palletsprojects.com/ +* License: `BSD `_ +* Releases: https://pypi.org/project/click/ +* Code: https://github.com/pallets/click +* Issue tracker: https://github.com/pallets/click/issues +* Test status: + + * Linux, Mac: https://travis-ci.org/pallets/click + * Windows: https://ci.appveyor.com/project/pallets/click + +* Test coverage: https://codecov.io/gh/pallets/click + + diff --git a/flask/venv/lib/python3.6/site-packages/Click-7.0.dist-info/RECORD b/flask/venv/lib/python3.6/site-packages/Click-7.0.dist-info/RECORD new file mode 100644 index 0000000..b99c0be --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/Click-7.0.dist-info/RECORD @@ -0,0 +1,40 @@ +Click-7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +Click-7.0.dist-info/LICENSE.txt,sha256=4hIxn676T0Wcisk3_chVcECjyrivKTZsoqSNI5AlIlw,1876 +Click-7.0.dist-info/METADATA,sha256=-r8jeke3Zer4diRvT1MjFZuiJ6yTT_qFP39svLqdaLI,3516 +Click-7.0.dist-info/RECORD,, +Click-7.0.dist-info/WHEEL,sha256=gduuPyBvFJQSQ0zdyxF7k0zynDXbIbvg5ZBHoXum5uk,110 +Click-7.0.dist-info/top_level.txt,sha256=J1ZQogalYS4pphY_lPECoNMfw0HzTSrZglC4Yfwo4xA,6 +click/__init__.py,sha256=HjGThQ7tef9kkwCV371TBnrf0SAi6fKfU_jtEnbYTvQ,2789 +click/__pycache__/__init__.cpython-36.pyc,, +click/__pycache__/_bashcomplete.cpython-36.pyc,, +click/__pycache__/_compat.cpython-36.pyc,, +click/__pycache__/_termui_impl.cpython-36.pyc,, +click/__pycache__/_textwrap.cpython-36.pyc,, +click/__pycache__/_unicodefun.cpython-36.pyc,, +click/__pycache__/_winconsole.cpython-36.pyc,, +click/__pycache__/core.cpython-36.pyc,, +click/__pycache__/decorators.cpython-36.pyc,, +click/__pycache__/exceptions.cpython-36.pyc,, +click/__pycache__/formatting.cpython-36.pyc,, +click/__pycache__/globals.cpython-36.pyc,, +click/__pycache__/parser.cpython-36.pyc,, +click/__pycache__/termui.cpython-36.pyc,, +click/__pycache__/testing.cpython-36.pyc,, +click/__pycache__/types.cpython-36.pyc,, +click/__pycache__/utils.cpython-36.pyc,, +click/_bashcomplete.py,sha256=iaNUmtxag0YPfxba3TDYCNietiTMQIrvhRLj-H8okFU,11014 +click/_compat.py,sha256=vYmvoj4opPxo-c-2GMQQjYT_r_QkOKybkfGoeVrt0dA,23399 +click/_termui_impl.py,sha256=xHmLtOJhKUCVD6168yucJ9fknUJPAMs0eUTPgVUO-GQ,19611 +click/_textwrap.py,sha256=gwS4m7bdQiJnzaDG8osFcRb-5vn4t4l2qSCy-5csCEc,1198 +click/_unicodefun.py,sha256=QHy2_5jYlX-36O-JVrTHNnHOqg8tquUR0HmQFev7Ics,4364 +click/_winconsole.py,sha256=PPWVak8Iikm_gAPsxMrzwsVFCvHgaW3jPaDWZ1JBl3U,8965 +click/core.py,sha256=q8FLcDZsagBGSRe5Y9Hi_FGvAeZvusNfoO5EkhkSQ8Y,75305 +click/decorators.py,sha256=idKt6duLUUfAFftrHoREi8MJSd39XW36pUVHthdglwk,11226 +click/exceptions.py,sha256=CNpAjBAE7qjaV4WChxQeak95e5yUOau8AsvT-8m6wss,7663 +click/formatting.py,sha256=eh-cypTUAhpI3HD-K4ZpR3vCiURIO62xXvKkR3tNUTM,8889 +click/globals.py,sha256=oQkou3ZQ5DgrbVM6BwIBirwiqozbjfirzsLGAlLRRdg,1514 +click/parser.py,sha256=m-nGZz4VwprM42_qtFlWFGo7yRJQxkBlRcZodoH593Y,15510 +click/termui.py,sha256=o_ZXB2jyvL2Rce7P_bFGq452iyBq9ykJyRApIPMCZO0,23207 +click/testing.py,sha256=aYGqY_iWLu2p4k7lkuJ6t3fqpf6aPGqTsyLzNY_ngKg,13062 +click/types.py,sha256=2Q929p-aBP_ZYuMFJqJR-Ipucofv3fmDc5JzBDPmzJU,23287 +click/utils.py,sha256=6-D0WkAxvv9FkgHXSHwDIv0l9Gdx9Mm6Z5vuKNLIfZI,15763 diff --git a/flask/venv/lib/python3.6/site-packages/Click-7.0.dist-info/WHEEL b/flask/venv/lib/python3.6/site-packages/Click-7.0.dist-info/WHEEL new file mode 100644 index 0000000..1316c41 --- /dev/null +++ 
b/flask/venv/lib/python3.6/site-packages/Click-7.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.31.1) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/flask/venv/lib/python3.6/site-packages/Click-7.0.dist-info/top_level.txt b/flask/venv/lib/python3.6/site-packages/Click-7.0.dist-info/top_level.txt new file mode 100644 index 0000000..dca9a90 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/Click-7.0.dist-info/top_level.txt @@ -0,0 +1 @@ +click diff --git a/flask/venv/lib/python3.6/site-packages/Flask-1.0.2.dist-info/INSTALLER b/flask/venv/lib/python3.6/site-packages/Flask-1.0.2.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/Flask-1.0.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/flask/venv/lib/python3.6/site-packages/Flask-1.0.2.dist-info/LICENSE.txt b/flask/venv/lib/python3.6/site-packages/Flask-1.0.2.dist-info/LICENSE.txt new file mode 100644 index 0000000..8f9252f --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/Flask-1.0.2.dist-info/LICENSE.txt @@ -0,0 +1,31 @@ +Copyright © 2010 by the Pallets team. + +Some rights reserved. + +Redistribution and use in source and binary forms of the software as +well as documentation, with or without modification, are permitted +provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE AND DOCUMENTATION IS PROVIDED BY THE COPYRIGHT HOLDERS AND +CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, +BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT +NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF +USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF +THIS SOFTWARE AND DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF +SUCH DAMAGE. diff --git a/flask/venv/lib/python3.6/site-packages/Flask-1.0.2.dist-info/METADATA b/flask/venv/lib/python3.6/site-packages/Flask-1.0.2.dist-info/METADATA new file mode 100644 index 0000000..c600e73 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/Flask-1.0.2.dist-info/METADATA @@ -0,0 +1,130 @@ +Metadata-Version: 2.1 +Name: Flask +Version: 1.0.2 +Summary: A simple framework for building complex web applications. 
+Home-page: https://www.palletsprojects.com/p/flask/ +Author: Armin Ronacher +Author-email: armin.ronacher@active-4.com +Maintainer: Pallets team +Maintainer-email: contact@palletsprojects.com +License: BSD +Project-URL: Documentation, http://flask.pocoo.org/docs/ +Project-URL: Code, https://github.com/pallets/flask +Project-URL: Issue tracker, https://github.com/pallets/flask/issues +Platform: any +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Framework :: Flask +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Application +Classifier: Topic :: Software Development :: Libraries :: Application Frameworks +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Provides-Extra: dev +Provides-Extra: docs +Provides-Extra: dotenv +Requires-Dist: Werkzeug (>=0.14) +Requires-Dist: Jinja2 (>=2.10) +Requires-Dist: itsdangerous (>=0.24) +Requires-Dist: click (>=5.1) +Provides-Extra: dev +Requires-Dist: pytest (>=3); extra == 'dev' +Requires-Dist: coverage; extra == 'dev' +Requires-Dist: tox; extra == 'dev' +Requires-Dist: sphinx; extra == 'dev' +Requires-Dist: pallets-sphinx-themes; extra == 'dev' +Requires-Dist: sphinxcontrib-log-cabinet; extra == 'dev' +Provides-Extra: docs +Requires-Dist: sphinx; extra == 'docs' +Requires-Dist: pallets-sphinx-themes; extra == 'docs' +Requires-Dist: sphinxcontrib-log-cabinet; extra == 'docs' +Provides-Extra: dotenv +Requires-Dist: python-dotenv; extra == 'dotenv' + +Flask +===== + +Flask is a lightweight `WSGI`_ web application framework. It is designed +to make getting started quick and easy, with the ability to scale up to +complex applications. It began as a simple wrapper around `Werkzeug`_ +and `Jinja`_ and has become one of the most popular Python web +application frameworks. + +Flask offers suggestions, but doesn't enforce any dependencies or +project layout. It is up to the developer to choose the tools and +libraries they want to use. There are many extensions provided by the +community that make adding new functionality easy. + + +Installing +---------- + +Install and update using `pip`_: + +.. code-block:: text + + pip install -U Flask + + +A Simple Example +---------------- + +.. code-block:: python + + from flask import Flask + + app = Flask(__name__) + + @app.route('/') + def hello(): + return 'Hello, World!' + +.. code-block:: text + + $ FLASK_APP=hello.py flask run + * Serving Flask app "hello" + * Running on http://127.0.0.1:5000/ (Press CTRL+C to quit) + + +Donate +------ + +The Pallets organization develops and supports Flask and the libraries +it uses. In order to grow the community of contributors and users, and +allow the maintainers to devote more time to the projects, `please +donate today`_. + +.. 
_please donate today: https://psfmember.org/civicrm/contribute/transact?reset=1&id=20 + + +Links +----- + +* Website: https://www.palletsprojects.com/p/flask/ +* Documentation: http://flask.pocoo.org/docs/ +* License: `BSD `_ +* Releases: https://pypi.org/project/Flask/ +* Code: https://github.com/pallets/flask +* Issue tracker: https://github.com/pallets/flask/issues +* Test status: + + * Linux, Mac: https://travis-ci.org/pallets/flask + * Windows: https://ci.appveyor.com/project/pallets/flask + +* Test coverage: https://codecov.io/gh/pallets/flask + +.. _WSGI: https://wsgi.readthedocs.io +.. _Werkzeug: https://www.palletsprojects.com/p/werkzeug/ +.. _Jinja: https://www.palletsprojects.com/p/jinja/ +.. _pip: https://pip.pypa.io/en/stable/quickstart/ + + diff --git a/flask/venv/lib/python3.6/site-packages/Flask-1.0.2.dist-info/RECORD b/flask/venv/lib/python3.6/site-packages/Flask-1.0.2.dist-info/RECORD new file mode 100644 index 0000000..6ace8a7 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/Flask-1.0.2.dist-info/RECORD @@ -0,0 +1,48 @@ +../../../bin/flask,sha256=PJpKemt9ycjSY85Fc33FpXJqiYyDexlc2Z9ziSH4wPo,239 +Flask-1.0.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +Flask-1.0.2.dist-info/LICENSE.txt,sha256=ziEXA3AIuaiUn1qe4cd1XxCESWTYrk4TjN7Qb06J3l8,1575 +Flask-1.0.2.dist-info/METADATA,sha256=iA5tiNWzTtgCVe80aTZGNWsckj853fJyfvHs9U-WZRk,4182 +Flask-1.0.2.dist-info/RECORD,, +Flask-1.0.2.dist-info/WHEEL,sha256=J3CsTk7Mf2JNUyhImI-mjX-fmI4oDjyiXgWT4qgZiCE,110 +Flask-1.0.2.dist-info/entry_points.txt,sha256=gBLA1aKg0OYR8AhbAfg8lnburHtKcgJLDU52BBctN0k,42 +Flask-1.0.2.dist-info/top_level.txt,sha256=dvi65F6AeGWVU0TBpYiC04yM60-FX1gJFkK31IKQr5c,6 +flask/__init__.py,sha256=qq8lK6QQbxJALf1igz7qsvUwOTAoKvFGfdLm7jPNsso,1673 +flask/__main__.py,sha256=pgIXrHhxM5MAMvgzAqWpw_t6AXZ1zG38us4JRgJKtxk,291 +flask/__pycache__/__init__.cpython-36.pyc,, +flask/__pycache__/__main__.cpython-36.pyc,, +flask/__pycache__/_compat.cpython-36.pyc,, +flask/__pycache__/app.cpython-36.pyc,, +flask/__pycache__/blueprints.cpython-36.pyc,, +flask/__pycache__/cli.cpython-36.pyc,, +flask/__pycache__/config.cpython-36.pyc,, +flask/__pycache__/ctx.cpython-36.pyc,, +flask/__pycache__/debughelpers.cpython-36.pyc,, +flask/__pycache__/globals.cpython-36.pyc,, +flask/__pycache__/helpers.cpython-36.pyc,, +flask/__pycache__/logging.cpython-36.pyc,, +flask/__pycache__/sessions.cpython-36.pyc,, +flask/__pycache__/signals.cpython-36.pyc,, +flask/__pycache__/templating.cpython-36.pyc,, +flask/__pycache__/testing.cpython-36.pyc,, +flask/__pycache__/views.cpython-36.pyc,, +flask/__pycache__/wrappers.cpython-36.pyc,, +flask/_compat.py,sha256=UDFGhosh6mOdNB-4evKPuneHum1OpcAlwTNJCRm0irQ,2892 +flask/app.py,sha256=ahpe3T8w98rQd_Er5d7uDxK57S1nnqGQx3V3hirBovU,94147 +flask/blueprints.py,sha256=Cyhl_x99tgwqEZPtNDJUFneAfVJxWfEU4bQA7zWS6VU,18331 +flask/cli.py,sha256=30QYAO10Do9LbZYCLgfI_xhKjASdLopL8wKKVUGS2oA,29442 +flask/config.py,sha256=kznUhj4DLYxsTF_4kfDG8GEHto1oZG_kqblyrLFtpqQ,9951 +flask/ctx.py,sha256=leFzS9fzmo0uaLCdxpHc5_iiJZ1H0X_Ig4yPCOvT--g,16224 +flask/debughelpers.py,sha256=1ceC-UyqZTd4KsJkf0OObHPsVt5R3T6vnmYhiWBjV-w,6479 +flask/globals.py,sha256=pGg72QW_-4xUfsI33I5L_y76c21AeqfSqXDcbd8wvXU,1649 +flask/helpers.py,sha256=YCl8D1plTO1evEYP4KIgaY3H8Izww5j4EdgRJ89oHTw,40106 +flask/json/__init__.py,sha256=Ns1Hj805XIxuBMh2z0dYnMVfb_KUgLzDmP3WoUYaPhw,10729 +flask/json/__pycache__/__init__.cpython-36.pyc,, +flask/json/__pycache__/tag.cpython-36.pyc,, 
+flask/json/tag.py,sha256=9ehzrmt5k7hxf7ZEK0NOs3swvQyU9fWNe-pnYe69N60,8223 +flask/logging.py,sha256=qV9h0vt7NIRkKM9OHDWndzO61E5CeBMlqPJyTt-W2Wc,2231 +flask/sessions.py,sha256=2XHV4ASREhSEZ8bsPQW6pNVNuFtbR-04BzfKg0AfvHo,14452 +flask/signals.py,sha256=BGQbVyCYXnzKK2DVCzppKFyWN1qmrtW1QMAYUs-1Nr8,2211 +flask/templating.py,sha256=FDfWMbpgpC3qObW8GGXRAVrkHFF8K4CHOJymB1wvULI,4914 +flask/testing.py,sha256=XD3gWNvLUV8dqVHwKd9tZzsj81fSHtjOphQ1wTNtlMs,9379 +flask/views.py,sha256=Wy-_WkUVtCfE2zCXYeJehNgHuEtviE4v3HYfJ--MpbY,5733 +flask/wrappers.py,sha256=1Z9hF5-hXQajn_58XITQFRY8efv3Vy3uZ0avBfZu6XI,7511 diff --git a/flask/venv/lib/python3.6/site-packages/Flask-1.0.2.dist-info/WHEEL b/flask/venv/lib/python3.6/site-packages/Flask-1.0.2.dist-info/WHEEL new file mode 100644 index 0000000..f21b51c --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/Flask-1.0.2.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.31.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/flask/venv/lib/python3.6/site-packages/Flask-1.0.2.dist-info/entry_points.txt b/flask/venv/lib/python3.6/site-packages/Flask-1.0.2.dist-info/entry_points.txt new file mode 100644 index 0000000..1eb0252 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/Flask-1.0.2.dist-info/entry_points.txt @@ -0,0 +1,3 @@ +[console_scripts] +flask = flask.cli:main + diff --git a/flask/venv/lib/python3.6/site-packages/Flask-1.0.2.dist-info/top_level.txt b/flask/venv/lib/python3.6/site-packages/Flask-1.0.2.dist-info/top_level.txt new file mode 100644 index 0000000..7e10602 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/Flask-1.0.2.dist-info/top_level.txt @@ -0,0 +1 @@ +flask diff --git a/flask/venv/lib/python3.6/site-packages/Jinja2-2.10.dist-info/DESCRIPTION.rst b/flask/venv/lib/python3.6/site-packages/Jinja2-2.10.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..1594da5 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/Jinja2-2.10.dist-info/DESCRIPTION.rst @@ -0,0 +1,37 @@ + +Jinja2 +~~~~~~ + +Jinja2 is a template engine written in pure Python. It provides a +`Django`_ inspired non-XML syntax but supports inline expressions and +an optional `sandboxed`_ environment. + +Nutshell +-------- + +Here a small example of a Jinja template:: + + {% extends 'base.html' %} + {% block title %}Memberlist{% endblock %} + {% block content %} + + {% endblock %} + +Philosophy +---------- + +Application logic is for the controller but don't try to make the life +for the template designer too hard by giving him too few functionality. + +For more informations visit the new `Jinja2 webpage`_ and `documentation`_. + +.. _sandboxed: https://en.wikipedia.org/wiki/Sandbox_(computer_security) +.. _Django: https://www.djangoproject.com/ +.. _Jinja2 webpage: http://jinja.pocoo.org/ +.. 
_documentation: http://jinja.pocoo.org/2/documentation/ + + diff --git a/flask/venv/lib/python3.6/site-packages/Jinja2-2.10.dist-info/INSTALLER b/flask/venv/lib/python3.6/site-packages/Jinja2-2.10.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/Jinja2-2.10.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/flask/venv/lib/python3.6/site-packages/Jinja2-2.10.dist-info/LICENSE.txt b/flask/venv/lib/python3.6/site-packages/Jinja2-2.10.dist-info/LICENSE.txt new file mode 100644 index 0000000..10145a2 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/Jinja2-2.10.dist-info/LICENSE.txt @@ -0,0 +1,31 @@ +Copyright (c) 2009 by the Jinja Team, see AUTHORS for more details. + +Some rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + + * The names of the contributors may not be used to endorse or + promote products derived from this software without specific + prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/flask/venv/lib/python3.6/site-packages/Jinja2-2.10.dist-info/METADATA b/flask/venv/lib/python3.6/site-packages/Jinja2-2.10.dist-info/METADATA new file mode 100644 index 0000000..40f2b46 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/Jinja2-2.10.dist-info/METADATA @@ -0,0 +1,68 @@ +Metadata-Version: 2.0 +Name: Jinja2 +Version: 2.10 +Summary: A small but fast and easy to use stand-alone template engine written in pure python. 
+Home-page: http://jinja.pocoo.org/ +Author: Armin Ronacher +Author-email: armin.ronacher@active-4.com +License: BSD +Description-Content-Type: UNKNOWN +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Text Processing :: Markup :: HTML +Requires-Dist: MarkupSafe (>=0.23) +Provides-Extra: i18n +Requires-Dist: Babel (>=0.8); extra == 'i18n' + + +Jinja2 +~~~~~~ + +Jinja2 is a template engine written in pure Python. It provides a +`Django`_ inspired non-XML syntax but supports inline expressions and +an optional `sandboxed`_ environment. + +Nutshell +-------- + +Here a small example of a Jinja template:: + + {% extends 'base.html' %} + {% block title %}Memberlist{% endblock %} + {% block content %} + + {% endblock %} + +Philosophy +---------- + +Application logic is for the controller but don't try to make the life +for the template designer too hard by giving him too few functionality. + +For more informations visit the new `Jinja2 webpage`_ and `documentation`_. + +.. _sandboxed: https://en.wikipedia.org/wiki/Sandbox_(computer_security) +.. _Django: https://www.djangoproject.com/ +.. _Jinja2 webpage: http://jinja.pocoo.org/ +.. 
_documentation: http://jinja.pocoo.org/2/documentation/ + + diff --git a/flask/venv/lib/python3.6/site-packages/Jinja2-2.10.dist-info/RECORD b/flask/venv/lib/python3.6/site-packages/Jinja2-2.10.dist-info/RECORD new file mode 100644 index 0000000..f66e1c4 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/Jinja2-2.10.dist-info/RECORD @@ -0,0 +1,63 @@ +Jinja2-2.10.dist-info/DESCRIPTION.rst,sha256=b5ckFDoM7vVtz_mAsJD4OPteFKCqE7beu353g4COoYI,978 +Jinja2-2.10.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +Jinja2-2.10.dist-info/LICENSE.txt,sha256=JvzUNv3Io51EiWrAPm8d_SXjhJnEjyDYvB3Tvwqqils,1554 +Jinja2-2.10.dist-info/METADATA,sha256=18EgU8zR6-av-0-5y_gXebzK4GnBB_76lALUsl-6QHM,2258 +Jinja2-2.10.dist-info/RECORD,, +Jinja2-2.10.dist-info/WHEEL,sha256=kdsN-5OJAZIiHN-iO4Rhl82KyS0bDWf4uBwMbkNafr8,110 +Jinja2-2.10.dist-info/entry_points.txt,sha256=NdzVcOrqyNyKDxD09aERj__3bFx2paZhizFDsKmVhiA,72 +Jinja2-2.10.dist-info/metadata.json,sha256=NPUJ9TMBxVQAv_kTJzvU8HwmP-4XZvbK9mz6_4YUVl4,1473 +Jinja2-2.10.dist-info/top_level.txt,sha256=PkeVWtLb3-CqjWi1fO29OCbj55EhX_chhKrCdrVe_zs,7 +jinja2/__init__.py,sha256=xJHjaMoy51_KXn1wf0cysH6tUUifUxZCwSOfcJGEYZw,2614 +jinja2/__pycache__/__init__.cpython-36.pyc,, +jinja2/__pycache__/_compat.cpython-36.pyc,, +jinja2/__pycache__/_identifier.cpython-36.pyc,, +jinja2/__pycache__/asyncfilters.cpython-36.pyc,, +jinja2/__pycache__/asyncsupport.cpython-36.pyc,, +jinja2/__pycache__/bccache.cpython-36.pyc,, +jinja2/__pycache__/compiler.cpython-36.pyc,, +jinja2/__pycache__/constants.cpython-36.pyc,, +jinja2/__pycache__/debug.cpython-36.pyc,, +jinja2/__pycache__/defaults.cpython-36.pyc,, +jinja2/__pycache__/environment.cpython-36.pyc,, +jinja2/__pycache__/exceptions.cpython-36.pyc,, +jinja2/__pycache__/ext.cpython-36.pyc,, +jinja2/__pycache__/filters.cpython-36.pyc,, +jinja2/__pycache__/idtracking.cpython-36.pyc,, +jinja2/__pycache__/lexer.cpython-36.pyc,, +jinja2/__pycache__/loaders.cpython-36.pyc,, +jinja2/__pycache__/meta.cpython-36.pyc,, +jinja2/__pycache__/nativetypes.cpython-36.pyc,, +jinja2/__pycache__/nodes.cpython-36.pyc,, +jinja2/__pycache__/optimizer.cpython-36.pyc,, +jinja2/__pycache__/parser.cpython-36.pyc,, +jinja2/__pycache__/runtime.cpython-36.pyc,, +jinja2/__pycache__/sandbox.cpython-36.pyc,, +jinja2/__pycache__/tests.cpython-36.pyc,, +jinja2/__pycache__/utils.cpython-36.pyc,, +jinja2/__pycache__/visitor.cpython-36.pyc,, +jinja2/_compat.py,sha256=xP60CE5Qr8FTYcDE1f54tbZLKGvMwYml4-8T7Q4KG9k,2596 +jinja2/_identifier.py,sha256=W1QBSY-iJsyt6oR_nKSuNNCzV95vLIOYgUNPUI1d5gU,1726 +jinja2/asyncfilters.py,sha256=cTDPvrS8Hp_IkwsZ1m9af_lr5nHysw7uTa5gV0NmZVE,4144 +jinja2/asyncsupport.py,sha256=UErQ3YlTLaSjFb94P4MVn08-aVD9jJxty2JVfMRb-1M,7878 +jinja2/bccache.py,sha256=nQldx0ZRYANMyfvOihRoYFKSlUdd5vJkS7BjxNwlOZM,12794 +jinja2/compiler.py,sha256=BqC5U6JxObSRhblyT_a6Tp5GtEU5z3US1a4jLQaxxgo,65386 +jinja2/constants.py,sha256=uwwV8ZUhHhacAuz5PTwckfsbqBaqM7aKfyJL7kGX5YQ,1626 +jinja2/debug.py,sha256=WTVeUFGUa4v6ReCsYv-iVPa3pkNB75OinJt3PfxNdXs,12045 +jinja2/defaults.py,sha256=Em-95hmsJxIenDCZFB1YSvf9CNhe9rBmytN3yUrBcWA,1400 +jinja2/environment.py,sha256=VnkAkqw8JbjZct4tAyHlpBrka2vqB-Z58RAP-32P1ZY,50849 +jinja2/exceptions.py,sha256=_Rj-NVi98Q6AiEjYQOsP8dEIdu5AlmRHzcSNOPdWix4,4428 +jinja2/ext.py,sha256=atMQydEC86tN1zUsdQiHw5L5cF62nDbqGue25Yiu3N4,24500 +jinja2/filters.py,sha256=yOAJk0MsH-_gEC0i0U6NweVQhbtYaC-uE8xswHFLF4w,36528 +jinja2/idtracking.py,sha256=2GbDSzIvGArEBGLkovLkqEfmYxmWsEf8c3QZwM4uNsw,9197 
+jinja2/lexer.py,sha256=ySEPoXd1g7wRjsuw23uimS6nkGN5aqrYwcOKxCaVMBQ,28559 +jinja2/loaders.py,sha256=xiTuURKAEObyym0nU8PCIXu_Qp8fn0AJ5oIADUUm-5Q,17382 +jinja2/meta.py,sha256=fmKHxkmZYAOm9QyWWy8EMd6eefAIh234rkBMW2X4ZR8,4340 +jinja2/nativetypes.py,sha256=_sJhS8f-8Q0QMIC0dm1YEdLyxEyoO-kch8qOL5xUDfE,7308 +jinja2/nodes.py,sha256=L10L_nQDfubLhO3XjpF9qz46FSh2clL-3e49ogVlMmA,30853 +jinja2/optimizer.py,sha256=MsdlFACJ0FRdPtjmCAdt7JQ9SGrXFaDNUaslsWQaG3M,1722 +jinja2/parser.py,sha256=lPzTEbcpTRBLw8ii6OYyExHeAhaZLMA05Hpv4ll3ULk,35875 +jinja2/runtime.py,sha256=DHdD38Pq8gj7uWQC5usJyWFoNWL317A9AvXOW_CLB34,27755 +jinja2/sandbox.py,sha256=TVyZHlNqqTzsv9fv2NvJNmSdWRHTguhyMHdxjWms32U,16708 +jinja2/tests.py,sha256=iJQLwbapZr-EKquTG_fVOVdwHUUKf3SX9eNkjQDF8oU,4237 +jinja2/utils.py,sha256=q24VupGZotQ-uOyrJxCaXtDWhZC1RgsQG7kcdmjck2Q,20629 +jinja2/visitor.py,sha256=JD1H1cANA29JcntFfN5fPyqQxB4bI4wC00BzZa-XHks,3316 diff --git a/flask/venv/lib/python3.6/site-packages/Jinja2-2.10.dist-info/WHEEL b/flask/venv/lib/python3.6/site-packages/Jinja2-2.10.dist-info/WHEEL new file mode 100644 index 0000000..7332a41 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/Jinja2-2.10.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.30.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/flask/venv/lib/python3.6/site-packages/Jinja2-2.10.dist-info/entry_points.txt b/flask/venv/lib/python3.6/site-packages/Jinja2-2.10.dist-info/entry_points.txt new file mode 100644 index 0000000..32e6b75 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/Jinja2-2.10.dist-info/entry_points.txt @@ -0,0 +1,4 @@ + + [babel.extractors] + jinja2 = jinja2.ext:babel_extract[i18n] + \ No newline at end of file diff --git a/flask/venv/lib/python3.6/site-packages/Jinja2-2.10.dist-info/metadata.json b/flask/venv/lib/python3.6/site-packages/Jinja2-2.10.dist-info/metadata.json new file mode 100644 index 0000000..7f5dc38 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/Jinja2-2.10.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Development Status :: 5 - Production/Stable", "Environment :: Web Environment", "Intended Audience :: Developers", "License :: OSI Approved :: BSD License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Topic :: Internet :: WWW/HTTP :: Dynamic Content", "Topic :: Software Development :: Libraries :: Python Modules", "Topic :: Text Processing :: Markup :: HTML"], "description_content_type": "UNKNOWN", "extensions": {"python.details": {"contacts": [{"email": "armin.ronacher@active-4.com", "name": "Armin Ronacher", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst", "license": "LICENSE.txt"}, "project_urls": {"Home": "http://jinja.pocoo.org/"}}, "python.exports": {"babel.extractors": {"jinja2": "jinja2.ext:babel_extract [i18n]"}}}, "extras": ["i18n"], "generator": "bdist_wheel (0.30.0)", "license": "BSD", "metadata_version": "2.0", "name": "Jinja2", "run_requires": [{"extra": "i18n", "requires": ["Babel (>=0.8)"]}, {"requires": ["MarkupSafe (>=0.23)"]}], "summary": "A small but fast and easy to use stand-alone template engine written in pure python.", "version": "2.10"} \ No newline at end of 
file diff --git a/flask/venv/lib/python3.6/site-packages/Jinja2-2.10.dist-info/top_level.txt b/flask/venv/lib/python3.6/site-packages/Jinja2-2.10.dist-info/top_level.txt new file mode 100644 index 0000000..7f7afbf --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/Jinja2-2.10.dist-info/top_level.txt @@ -0,0 +1 @@ +jinja2 diff --git a/flask/venv/lib/python3.6/site-packages/MarkupSafe-1.1.0.dist-info/INSTALLER b/flask/venv/lib/python3.6/site-packages/MarkupSafe-1.1.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/MarkupSafe-1.1.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/flask/venv/lib/python3.6/site-packages/MarkupSafe-1.1.0.dist-info/LICENSE.txt b/flask/venv/lib/python3.6/site-packages/MarkupSafe-1.1.0.dist-info/LICENSE.txt new file mode 100644 index 0000000..28c9258 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/MarkupSafe-1.1.0.dist-info/LICENSE.txt @@ -0,0 +1,33 @@ +`BSD 3-Clause `_ + +Copyright © 2010 by the Pallets team. + +Some rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +- Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +- Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +- Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE AND DOCUMENTATION IS PROVIDED BY THE COPYRIGHT HOLDERS AND +CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, +BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT +NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF +USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF +THIS SOFTWARE AND DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF +SUCH DAMAGE. diff --git a/flask/venv/lib/python3.6/site-packages/MarkupSafe-1.1.0.dist-info/METADATA b/flask/venv/lib/python3.6/site-packages/MarkupSafe-1.1.0.dist-info/METADATA new file mode 100644 index 0000000..e532eb0 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/MarkupSafe-1.1.0.dist-info/METADATA @@ -0,0 +1,103 @@ +Metadata-Version: 2.1 +Name: MarkupSafe +Version: 1.1.0 +Summary: Safely add untrusted strings to HTML/XML markup. 
+Home-page: https://www.palletsprojects.com/p/markupsafe/ +Author: Armin Ronacher +Author-email: armin.ronacher@active-4.com +Maintainer: Pallets Team +Maintainer-email: contact@palletsprojects.com +License: BSD +Project-URL: Documentation, https://markupsafe.palletsprojects.com/ +Project-URL: Code, https://github.com/pallets/markupsafe +Project-URL: Issue tracker, https://github.com/pallets/markupsafe/issues +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Text Processing :: Markup :: HTML +Requires-Python: >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.* + +MarkupSafe +========== + +MarkupSafe implements a text object that escapes characters so it is +safe to use in HTML and XML. Characters that have special meanings are +replaced so that they display as the actual characters. This mitigates +injection attacks, meaning untrusted user input can safely be displayed +on a page. + + +Installing +---------- + +Install and update using `pip`_: + +.. code-block:: text + + pip install -U MarkupSafe + +.. _pip: https://pip.pypa.io/en/stable/quickstart/ + + +Examples +-------- + +.. code-block:: pycon + + >>> from markupsafe import Markup, escape + >>> # escape replaces special characters and wraps in Markup + >>> escape('') + Markup(u'<script>alert(document.cookie);</script>') + >>> # wrap in Markup to mark text "safe" and prevent escaping + >>> Markup('Hello') + Markup('hello') + >>> escape(Markup('Hello')) + Markup('hello') + >>> # Markup is a text subclass (str on Python 3, unicode on Python 2) + >>> # methods and operators escape their arguments + >>> template = Markup("Hello %s") + >>> template % '"World"' + Markup('Hello "World"') + + +Donate +------ + +The Pallets organization develops and supports MarkupSafe and other +libraries that use it. In order to grow the community of contributors +and users, and allow the maintainers to devote more time to the +projects, `please donate today`_. + +.. 
_please donate today: https://psfmember.org/civicrm/contribute/transact?reset=1&id=20 + + +Links +----- + +* Website: https://www.palletsprojects.com/p/markupsafe/ +* Documentation: https://markupsafe.palletsprojects.com/ +* License: `BSD `_ +* Releases: https://pypi.org/project/MarkupSafe/ +* Code: https://github.com/pallets/markupsafe +* Issue tracker: https://github.com/pallets/markupsafe/issues +* Test status: + + * Linux, Mac: https://travis-ci.org/pallets/markupsafe + * Windows: https://ci.appveyor.com/project/pallets/markupsafe + +* Test coverage: https://codecov.io/gh/pallets/markupsafe + + diff --git a/flask/venv/lib/python3.6/site-packages/MarkupSafe-1.1.0.dist-info/RECORD b/flask/venv/lib/python3.6/site-packages/MarkupSafe-1.1.0.dist-info/RECORD new file mode 100644 index 0000000..8f0dba1 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/MarkupSafe-1.1.0.dist-info/RECORD @@ -0,0 +1,16 @@ +MarkupSafe-1.1.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +MarkupSafe-1.1.0.dist-info/LICENSE.txt,sha256=7V249lpOdvRv2m6SF9gCDtq_nsg8tFpdeTdsWWM_g9M,1614 +MarkupSafe-1.1.0.dist-info/METADATA,sha256=usFnBges7tmAH4_Yt5Ypb8Bco4R9uLUdD0V6YHbvhLw,3585 +MarkupSafe-1.1.0.dist-info/RECORD,, +MarkupSafe-1.1.0.dist-info/WHEEL,sha256=d2ILPScH-y2UwGxsW1PeA2TT-KW0Git4AJ6LeOK8sQo,109 +MarkupSafe-1.1.0.dist-info/top_level.txt,sha256=qy0Plje5IJuvsCBjejJyhDCjEAdcDLK_2agVcex8Z6U,11 +markupsafe/__init__.py,sha256=T5J4pS7LRx1xRqfV3xz-QN_D9pSmfVDJnTrc2cTO4Ro,10164 +markupsafe/__pycache__/__init__.cpython-36.pyc,, +markupsafe/__pycache__/_compat.cpython-36.pyc,, +markupsafe/__pycache__/_constants.cpython-36.pyc,, +markupsafe/__pycache__/_native.cpython-36.pyc,, +markupsafe/_compat.py,sha256=3oSvQpEFzsJ29NKVy-Fqk6ZlRxmlCB5k0G21aN0zNtQ,596 +markupsafe/_constants.py,sha256=ueEz1Jxdw5TKWBbhPr4Ad_2L2MSEh73AYiYe4l3cZy4,4728 +markupsafe/_native.py,sha256=fUrjjbRXIpHM-8l9QXFJ2xg5rv_48U2aN99plyL0kfs,1911 +markupsafe/_speedups.c,sha256=VfElhhq9oulHEd2wBZ2MX9A80r4jFovsVGQD2zxmVk0,9883 +markupsafe/_speedups.cpython-36m-x86_64-linux-gnu.so,sha256=BS7m4DA4L7J_sXAEFkmQeW3HJStGrC1yXYRifVOaTvc,38555 diff --git a/flask/venv/lib/python3.6/site-packages/MarkupSafe-1.1.0.dist-info/WHEEL b/flask/venv/lib/python3.6/site-packages/MarkupSafe-1.1.0.dist-info/WHEEL new file mode 100644 index 0000000..92946fe --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/MarkupSafe-1.1.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.31.1) +Root-Is-Purelib: false +Tag: cp36-cp36m-manylinux1_x86_64 + diff --git a/flask/venv/lib/python3.6/site-packages/MarkupSafe-1.1.0.dist-info/top_level.txt b/flask/venv/lib/python3.6/site-packages/MarkupSafe-1.1.0.dist-info/top_level.txt new file mode 100644 index 0000000..75bf729 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/MarkupSafe-1.1.0.dist-info/top_level.txt @@ -0,0 +1 @@ +markupsafe diff --git a/flask/venv/lib/python3.6/site-packages/PyYAML-3.13.egg-info/PKG-INFO b/flask/venv/lib/python3.6/site-packages/PyYAML-3.13.egg-info/PKG-INFO new file mode 100644 index 0000000..9c20cdc --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/PyYAML-3.13.egg-info/PKG-INFO @@ -0,0 +1,33 @@ +Metadata-Version: 1.1 +Name: PyYAML +Version: 3.13 +Summary: YAML parser and emitter for Python +Home-page: http://pyyaml.org/wiki/PyYAML +Author: Kirill Simonov +Author-email: xi@resolvent.net +License: MIT +Download-URL: http://pyyaml.org/download/pyyaml/PyYAML-3.13.tar.gz +Description: YAML is a data serialization format designed for human 
readability + and interaction with scripting languages. PyYAML is a YAML parser + and emitter for Python. + + PyYAML features a complete YAML 1.1 parser, Unicode support, pickle + support, capable extension API, and sensible error messages. PyYAML + supports standard YAML tags and provides Python-specific tags that + allow to represent an arbitrary Python object. + + PyYAML is applicable for a broad range of tasks from complex + configuration files to object serialization and persistance. +Platform: Any +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Text Processing :: Markup diff --git a/flask/venv/lib/python3.6/site-packages/PyYAML-3.13.egg-info/SOURCES.txt b/flask/venv/lib/python3.6/site-packages/PyYAML-3.13.egg-info/SOURCES.txt new file mode 100644 index 0000000..bd794ab --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/PyYAML-3.13.egg-info/SOURCES.txt @@ -0,0 +1,28 @@ +README +setup.cfg +setup.py +ext/_yaml.c +ext/_yaml.h +ext/_yaml.pxd +ext/_yaml.pyx +lib3/PyYAML.egg-info/PKG-INFO +lib3/PyYAML.egg-info/SOURCES.txt +lib3/PyYAML.egg-info/dependency_links.txt +lib3/PyYAML.egg-info/top_level.txt +lib3/yaml/__init__.py +lib3/yaml/composer.py +lib3/yaml/constructor.py +lib3/yaml/cyaml.py +lib3/yaml/dumper.py +lib3/yaml/emitter.py +lib3/yaml/error.py +lib3/yaml/events.py +lib3/yaml/loader.py +lib3/yaml/nodes.py +lib3/yaml/parser.py +lib3/yaml/reader.py +lib3/yaml/representer.py +lib3/yaml/resolver.py +lib3/yaml/scanner.py +lib3/yaml/serializer.py +lib3/yaml/tokens.py \ No newline at end of file diff --git a/flask/venv/lib/python3.6/site-packages/PyYAML-3.13.egg-info/dependency_links.txt b/flask/venv/lib/python3.6/site-packages/PyYAML-3.13.egg-info/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/PyYAML-3.13.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/flask/venv/lib/python3.6/site-packages/PyYAML-3.13.egg-info/installed-files.txt b/flask/venv/lib/python3.6/site-packages/PyYAML-3.13.egg-info/installed-files.txt new file mode 100644 index 0000000..810f0cb --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/PyYAML-3.13.egg-info/installed-files.txt @@ -0,0 +1,38 @@ +../yaml/__init__.py +../yaml/__pycache__/__init__.cpython-36.pyc +../yaml/__pycache__/composer.cpython-36.pyc +../yaml/__pycache__/constructor.cpython-36.pyc +../yaml/__pycache__/cyaml.cpython-36.pyc +../yaml/__pycache__/dumper.cpython-36.pyc +../yaml/__pycache__/emitter.cpython-36.pyc +../yaml/__pycache__/error.cpython-36.pyc +../yaml/__pycache__/events.cpython-36.pyc +../yaml/__pycache__/loader.cpython-36.pyc +../yaml/__pycache__/nodes.cpython-36.pyc +../yaml/__pycache__/parser.cpython-36.pyc +../yaml/__pycache__/reader.cpython-36.pyc +../yaml/__pycache__/representer.cpython-36.pyc +../yaml/__pycache__/resolver.cpython-36.pyc +../yaml/__pycache__/scanner.cpython-36.pyc +../yaml/__pycache__/serializer.cpython-36.pyc +../yaml/__pycache__/tokens.cpython-36.pyc +../yaml/composer.py 
+../yaml/constructor.py +../yaml/cyaml.py +../yaml/dumper.py +../yaml/emitter.py +../yaml/error.py +../yaml/events.py +../yaml/loader.py +../yaml/nodes.py +../yaml/parser.py +../yaml/reader.py +../yaml/representer.py +../yaml/resolver.py +../yaml/scanner.py +../yaml/serializer.py +../yaml/tokens.py +PKG-INFO +SOURCES.txt +dependency_links.txt +top_level.txt diff --git a/flask/venv/lib/python3.6/site-packages/PyYAML-3.13.egg-info/top_level.txt b/flask/venv/lib/python3.6/site-packages/PyYAML-3.13.egg-info/top_level.txt new file mode 100644 index 0000000..e6475e9 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/PyYAML-3.13.egg-info/top_level.txt @@ -0,0 +1,2 @@ +_yaml +yaml diff --git a/flask/venv/lib/python3.6/site-packages/Werkzeug-0.14.1.dist-info/DESCRIPTION.rst b/flask/venv/lib/python3.6/site-packages/Werkzeug-0.14.1.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..675f08d --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/Werkzeug-0.14.1.dist-info/DESCRIPTION.rst @@ -0,0 +1,80 @@ +Werkzeug +======== + +Werkzeug is a comprehensive `WSGI`_ web application library. It began as +a simple collection of various utilities for WSGI applications and has +become one of the most advanced WSGI utility libraries. + +It includes: + +* An interactive debugger that allows inspecting stack traces and source + code in the browser with an interactive interpreter for any frame in + the stack. +* A full-featured request object with objects to interact with headers, + query args, form data, files, and cookies. +* A response object that can wrap other WSGI applications and handle + streaming data. +* A routing system for matching URLs to endpoints and generating URLs + for endpoints, with an extensible system for capturing variables from + URLs. +* HTTP utilities to handle entity tags, cache control, dates, user + agents, cookies, files, and more. +* A threaded WSGI server for use while developing applications locally. +* A test client for simulating HTTP requests during testing without + requiring running a server. + +Werkzeug is Unicode aware and doesn't enforce any dependencies. It is up +to the developer to choose a template engine, database adapter, and even +how to handle requests. It can be used to build all sorts of end user +applications such as blogs, wikis, or bulletin boards. + +`Flask`_ wraps Werkzeug, using it to handle the details of WSGI while +providing more structure and patterns for defining powerful +applications. + + +Installing +---------- + +Install and update using `pip`_: + +.. code-block:: text + + pip install -U Werkzeug + + +A Simple Example +---------------- + +.. code-block:: python + + from werkzeug.wrappers import Request, Response + + @Request.application + def application(request): + return Response('Hello, World!') + + if __name__ == '__main__': + from werkzeug.serving import run_simple + run_simple('localhost', 4000, application) + + +Links +----- + +* Website: https://www.palletsprojects.com/p/werkzeug/ +* Releases: https://pypi.org/project/Werkzeug/ +* Code: https://github.com/pallets/werkzeug +* Issue tracker: https://github.com/pallets/werkzeug/issues +* Test status: + + * Linux, Mac: https://travis-ci.org/pallets/werkzeug + * Windows: https://ci.appveyor.com/project/davidism/werkzeug + +* Test coverage: https://codecov.io/gh/pallets/werkzeug + +.. _WSGI: https://wsgi.readthedocs.io/en/latest/ +.. _Flask: https://www.palletsprojects.com/p/flask/ +.. 
_pip: https://pip.pypa.io/en/stable/quickstart/ + + diff --git a/flask/venv/lib/python3.6/site-packages/Werkzeug-0.14.1.dist-info/INSTALLER b/flask/venv/lib/python3.6/site-packages/Werkzeug-0.14.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/Werkzeug-0.14.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/flask/venv/lib/python3.6/site-packages/Werkzeug-0.14.1.dist-info/LICENSE.txt b/flask/venv/lib/python3.6/site-packages/Werkzeug-0.14.1.dist-info/LICENSE.txt new file mode 100644 index 0000000..1cc75bb --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/Werkzeug-0.14.1.dist-info/LICENSE.txt @@ -0,0 +1,31 @@ +Copyright © 2007 by the Pallets team. + +Some rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +* Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE AND DOCUMENTATION IS PROVIDED BY THE COPYRIGHT HOLDERS AND +CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, +BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT +NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF +USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF +THIS SOFTWARE AND DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF +SUCH DAMAGE. diff --git a/flask/venv/lib/python3.6/site-packages/Werkzeug-0.14.1.dist-info/METADATA b/flask/venv/lib/python3.6/site-packages/Werkzeug-0.14.1.dist-info/METADATA new file mode 100644 index 0000000..bfc3c4e --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/Werkzeug-0.14.1.dist-info/METADATA @@ -0,0 +1,116 @@ +Metadata-Version: 2.0 +Name: Werkzeug +Version: 0.14.1 +Summary: The comprehensive WSGI web application library. 
+Home-page: https://www.palletsprojects.org/p/werkzeug/ +Author: Armin Ronacher +Author-email: armin.ronacher@active-4.com +License: BSD +Description-Content-Type: UNKNOWN +Platform: any +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Provides-Extra: dev +Requires-Dist: coverage; extra == 'dev' +Requires-Dist: pytest; extra == 'dev' +Requires-Dist: sphinx; extra == 'dev' +Requires-Dist: tox; extra == 'dev' +Provides-Extra: termcolor +Requires-Dist: termcolor; extra == 'termcolor' +Provides-Extra: watchdog +Requires-Dist: watchdog; extra == 'watchdog' + +Werkzeug +======== + +Werkzeug is a comprehensive `WSGI`_ web application library. It began as +a simple collection of various utilities for WSGI applications and has +become one of the most advanced WSGI utility libraries. + +It includes: + +* An interactive debugger that allows inspecting stack traces and source + code in the browser with an interactive interpreter for any frame in + the stack. +* A full-featured request object with objects to interact with headers, + query args, form data, files, and cookies. +* A response object that can wrap other WSGI applications and handle + streaming data. +* A routing system for matching URLs to endpoints and generating URLs + for endpoints, with an extensible system for capturing variables from + URLs. +* HTTP utilities to handle entity tags, cache control, dates, user + agents, cookies, files, and more. +* A threaded WSGI server for use while developing applications locally. +* A test client for simulating HTTP requests during testing without + requiring running a server. + +Werkzeug is Unicode aware and doesn't enforce any dependencies. It is up +to the developer to choose a template engine, database adapter, and even +how to handle requests. It can be used to build all sorts of end user +applications such as blogs, wikis, or bulletin boards. + +`Flask`_ wraps Werkzeug, using it to handle the details of WSGI while +providing more structure and patterns for defining powerful +applications. + + +Installing +---------- + +Install and update using `pip`_: + +.. code-block:: text + + pip install -U Werkzeug + + +A Simple Example +---------------- + +.. 
code-block:: python + + from werkzeug.wrappers import Request, Response + + @Request.application + def application(request): + return Response('Hello, World!') + + if __name__ == '__main__': + from werkzeug.serving import run_simple + run_simple('localhost', 4000, application) + + +Links +----- + +* Website: https://www.palletsprojects.com/p/werkzeug/ +* Releases: https://pypi.org/project/Werkzeug/ +* Code: https://github.com/pallets/werkzeug +* Issue tracker: https://github.com/pallets/werkzeug/issues +* Test status: + + * Linux, Mac: https://travis-ci.org/pallets/werkzeug + * Windows: https://ci.appveyor.com/project/davidism/werkzeug + +* Test coverage: https://codecov.io/gh/pallets/werkzeug + +.. _WSGI: https://wsgi.readthedocs.io/en/latest/ +.. _Flask: https://www.palletsprojects.com/p/flask/ +.. _pip: https://pip.pypa.io/en/stable/quickstart/ + + diff --git a/flask/venv/lib/python3.6/site-packages/Werkzeug-0.14.1.dist-info/RECORD b/flask/venv/lib/python3.6/site-packages/Werkzeug-0.14.1.dist-info/RECORD new file mode 100644 index 0000000..0953881 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/Werkzeug-0.14.1.dist-info/RECORD @@ -0,0 +1,97 @@ +Werkzeug-0.14.1.dist-info/DESCRIPTION.rst,sha256=rOCN36jwsWtWsTpqPG96z7FMilB5qI1CIARSKRuUmz8,2452 +Werkzeug-0.14.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +Werkzeug-0.14.1.dist-info/LICENSE.txt,sha256=xndz_dD4m269AF9l_Xbl5V3tM1N3C1LoZC2PEPxWO-8,1534 +Werkzeug-0.14.1.dist-info/METADATA,sha256=FbfadrPdJNUWAxMOKxGUtHe5R3IDSBKYYmAz3FvI3uY,3872 +Werkzeug-0.14.1.dist-info/RECORD,, +Werkzeug-0.14.1.dist-info/WHEEL,sha256=GrqQvamwgBV4nLoJe0vhYRSWzWsx7xjlt74FT0SWYfE,110 +Werkzeug-0.14.1.dist-info/metadata.json,sha256=4489UTt6HBp2NQil95-pBkjU4Je93SMHvMxZ_rjOpqA,1452 +Werkzeug-0.14.1.dist-info/top_level.txt,sha256=QRyj2VjwJoQkrwjwFIOlB8Xg3r9un0NtqVHQF-15xaw,9 +werkzeug/__init__.py,sha256=NR0d4n_-U9BLVKlOISean3zUt2vBwhvK-AZE6M0sC0k,6842 +werkzeug/__pycache__/__init__.cpython-36.pyc,, +werkzeug/__pycache__/_compat.cpython-36.pyc,, +werkzeug/__pycache__/_internal.cpython-36.pyc,, +werkzeug/__pycache__/_reloader.cpython-36.pyc,, +werkzeug/__pycache__/datastructures.cpython-36.pyc,, +werkzeug/__pycache__/exceptions.cpython-36.pyc,, +werkzeug/__pycache__/filesystem.cpython-36.pyc,, +werkzeug/__pycache__/formparser.cpython-36.pyc,, +werkzeug/__pycache__/http.cpython-36.pyc,, +werkzeug/__pycache__/local.cpython-36.pyc,, +werkzeug/__pycache__/posixemulation.cpython-36.pyc,, +werkzeug/__pycache__/routing.cpython-36.pyc,, +werkzeug/__pycache__/script.cpython-36.pyc,, +werkzeug/__pycache__/security.cpython-36.pyc,, +werkzeug/__pycache__/serving.cpython-36.pyc,, +werkzeug/__pycache__/test.cpython-36.pyc,, +werkzeug/__pycache__/testapp.cpython-36.pyc,, +werkzeug/__pycache__/urls.cpython-36.pyc,, +werkzeug/__pycache__/useragents.cpython-36.pyc,, +werkzeug/__pycache__/utils.cpython-36.pyc,, +werkzeug/__pycache__/websocket.cpython-36.pyc,, +werkzeug/__pycache__/wrappers.cpython-36.pyc,, +werkzeug/__pycache__/wsgi.cpython-36.pyc,, +werkzeug/_compat.py,sha256=8c4U9o6A_TR9nKCcTbpZNxpqCXcXDVIbFawwKM2s92c,6311 +werkzeug/_internal.py,sha256=GhEyGMlsSz_tYjsDWO9TG35VN7304MM8gjKDrXLEdVc,13873 +werkzeug/_reloader.py,sha256=AyPphcOHPbu6qzW0UbrVvTDJdre5WgpxbhIJN_TqzUc,9264 +werkzeug/contrib/__init__.py,sha256=f7PfttZhbrImqpr5Ezre8CXgwvcGUJK7zWNpO34WWrw,623 +werkzeug/contrib/__pycache__/__init__.cpython-36.pyc,, +werkzeug/contrib/__pycache__/atom.cpython-36.pyc,, +werkzeug/contrib/__pycache__/cache.cpython-36.pyc,, 
+werkzeug/contrib/__pycache__/fixers.cpython-36.pyc,, +werkzeug/contrib/__pycache__/iterio.cpython-36.pyc,, +werkzeug/contrib/__pycache__/jsrouting.cpython-36.pyc,, +werkzeug/contrib/__pycache__/limiter.cpython-36.pyc,, +werkzeug/contrib/__pycache__/lint.cpython-36.pyc,, +werkzeug/contrib/__pycache__/profiler.cpython-36.pyc,, +werkzeug/contrib/__pycache__/securecookie.cpython-36.pyc,, +werkzeug/contrib/__pycache__/sessions.cpython-36.pyc,, +werkzeug/contrib/__pycache__/testtools.cpython-36.pyc,, +werkzeug/contrib/__pycache__/wrappers.cpython-36.pyc,, +werkzeug/contrib/atom.py,sha256=qqfJcfIn2RYY-3hO3Oz0aLq9YuNubcPQ_KZcNsDwVJo,15575 +werkzeug/contrib/cache.py,sha256=xBImHNj09BmX_7kC5NUCx8f_l4L8_O7zi0jCL21UZKE,32163 +werkzeug/contrib/fixers.py,sha256=gR06T-w71ur-tHQ_31kP_4jpOncPJ4Wc1dOqTvYusr8,10179 +werkzeug/contrib/iterio.py,sha256=RlqDvGhz0RneTpzE8dVc-yWCUv4nkPl1jEc_EDp2fH0,10814 +werkzeug/contrib/jsrouting.py,sha256=QTmgeDoKXvNK02KzXgx9lr3cAH6fAzpwF5bBdPNvJPs,8564 +werkzeug/contrib/limiter.py,sha256=iS8-ahPZ-JLRnmfIBzxpm7O_s3lPsiDMVWv7llAIDCI,1334 +werkzeug/contrib/lint.py,sha256=Mj9NeUN7s4zIUWeQOAVjrmtZIcl3Mm2yDe9BSIr9YGE,12558 +werkzeug/contrib/profiler.py,sha256=ISwCWvwVyGpDLRBRpLjo_qUWma6GXYBrTAco4PEQSHY,5151 +werkzeug/contrib/securecookie.py,sha256=uWMyHDHY3lkeBRiCSayGqWkAIy4a7xAbSE_Hln9ecqc,12196 +werkzeug/contrib/sessions.py,sha256=39LVNvLbm5JWpbxM79WC2l87MJFbqeISARjwYbkJatw,12577 +werkzeug/contrib/testtools.py,sha256=G9xN-qeihJlhExrIZMCahvQOIDxdL9NiX874jiiHFMs,2453 +werkzeug/contrib/wrappers.py,sha256=v7OYlz7wQtDlS9fey75UiRZ1IkUWqCpzbhsLy4k14Hw,10398 +werkzeug/datastructures.py,sha256=3IgNKNqrz-ZjmAG7y3YgEYK-enDiMT_b652PsypWcYg,90080 +werkzeug/debug/__init__.py,sha256=uSn9BqCZ5E3ySgpoZtundpROGsn-uYvZtSFiTfAX24M,17452 +werkzeug/debug/__pycache__/__init__.cpython-36.pyc,, +werkzeug/debug/__pycache__/console.cpython-36.pyc,, +werkzeug/debug/__pycache__/repr.cpython-36.pyc,, +werkzeug/debug/__pycache__/tbtools.cpython-36.pyc,, +werkzeug/debug/console.py,sha256=n3-dsKk1TsjnN-u4ZgmuWCU_HO0qw5IA7ttjhyyMM6I,5607 +werkzeug/debug/repr.py,sha256=bKqstDYGfECpeLerd48s_hxuqK4b6UWnjMu3d_DHO8I,9340 +werkzeug/debug/shared/FONT_LICENSE,sha256=LwAVEI1oYnvXiNMT9SnCH_TaLCxCpeHziDrMg0gPkAI,4673 +werkzeug/debug/shared/console.png,sha256=bxax6RXXlvOij_KeqvSNX0ojJf83YbnZ7my-3Gx9w2A,507 +werkzeug/debug/shared/debugger.js,sha256=PKPVYuyO4SX1hkqLOwCLvmIEO5154WatFYaXE-zIfKI,6264 +werkzeug/debug/shared/jquery.js,sha256=7LkWEzqTdpEfELxcZZlS6wAx5Ff13zZ83lYO2_ujj7g,95957 +werkzeug/debug/shared/less.png,sha256=-4-kNRaXJSONVLahrQKUxMwXGm9R4OnZ9SxDGpHlIR4,191 +werkzeug/debug/shared/more.png,sha256=GngN7CioHQoV58rH6ojnkYi8c_qED2Aka5FO5UXrReY,200 +werkzeug/debug/shared/source.png,sha256=RoGcBTE4CyCB85GBuDGTFlAnUqxwTBiIfDqW15EpnUQ,818 +werkzeug/debug/shared/style.css,sha256=IEO0PC2pWmh2aEyGCaN--txuWsRCliuhlbEhPDFwh0A,6270 +werkzeug/debug/shared/ubuntu.ttf,sha256=1eaHFyepmy4FyDvjLVzpITrGEBu_CZYY94jE0nED1c0,70220 +werkzeug/debug/tbtools.py,sha256=rBudXCmkVdAKIcdhxANxgf09g6kQjJWW9_5bjSpr4OY,18451 +werkzeug/exceptions.py,sha256=3wp95Hqj9FqV8MdikV99JRcHse_fSMn27V8tgP5Hw2c,20505 +werkzeug/filesystem.py,sha256=hHWeWo_gqLMzTRfYt8-7n2wWcWUNTnDyudQDLOBEICE,2175 +werkzeug/formparser.py,sha256=mUuCwjzjb8_E4RzrAT2AioLuZSYpqR1KXTK6LScRYzA,21722 +werkzeug/http.py,sha256=RQg4MJuhRv2isNRiEh__Phh09ebpfT3Kuu_GfrZ54_c,40079 +werkzeug/local.py,sha256=QdQhWV5L8p1Y1CJ1CDStwxaUs24SuN5aebHwjVD08C8,14553 +werkzeug/posixemulation.py,sha256=xEF2Bxc-vUCPkiu4IbfWVd3LW7DROYAT-ExW6THqyzw,3519 
+werkzeug/routing.py,sha256=2JVtdSgxKGeANy4Z_FP-dKESvKtkYGCZ1J2fARCLGCY,67214 +werkzeug/script.py,sha256=DwaVDcXdaOTffdNvlBdLitxWXjKaRVT32VbhDtljFPY,11365 +werkzeug/security.py,sha256=0m107exslz4QJLWQCpfQJ04z3re4eGHVggRvrQVAdWc,9193 +werkzeug/serving.py,sha256=A0flnIJHufdn2QJ9oeuHfrXwP3LzP8fn3rNW6hbxKUg,31926 +werkzeug/test.py,sha256=XmECSmnpASiYQTct4oMiWr0LT5jHWCtKqnpYKZd2ui8,36100 +werkzeug/testapp.py,sha256=3HQRW1sHZKXuAjCvFMet4KXtQG3loYTFnvn6LWt-4zI,9396 +werkzeug/urls.py,sha256=dUeLg2IeTm0WLmSvFeD4hBZWGdOs-uHudR5-t8n9zPo,36771 +werkzeug/useragents.py,sha256=BhYMf4cBTHyN4U0WsQedePIocmNlH_34C-UwqSThGCc,5865 +werkzeug/utils.py,sha256=BrY1j0DHQ8RTb0K1StIobKuMJhN9SQQkWEARbrh2qpk,22972 +werkzeug/websocket.py,sha256=PpSeDxXD_0UsPAa5hQhQNM6mxibeUgn8lA8eRqiS0vM,11344 +werkzeug/wrappers.py,sha256=kbyL_aFjxELwPgMwfNCYjKu-CR6kNkh-oO8wv3GXbk8,84511 +werkzeug/wsgi.py,sha256=1Nob-aeChWQf7MsiicO8RZt6J90iRzEcik44ev9Qu8s,49347 diff --git a/flask/venv/lib/python3.6/site-packages/Werkzeug-0.14.1.dist-info/WHEEL b/flask/venv/lib/python3.6/site-packages/Werkzeug-0.14.1.dist-info/WHEEL new file mode 100644 index 0000000..0de529b --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/Werkzeug-0.14.1.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.26.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/flask/venv/lib/python3.6/site-packages/Werkzeug-0.14.1.dist-info/metadata.json b/flask/venv/lib/python3.6/site-packages/Werkzeug-0.14.1.dist-info/metadata.json new file mode 100644 index 0000000..bca8d12 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/Werkzeug-0.14.1.dist-info/metadata.json @@ -0,0 +1 @@ +{"generator": "bdist_wheel (0.26.0)", "summary": "The comprehensive WSGI web application library.", "classifiers": ["Development Status :: 5 - Production/Stable", "Environment :: Web Environment", "Intended Audience :: Developers", "License :: OSI Approved :: BSD License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Topic :: Internet :: WWW/HTTP :: Dynamic Content", "Topic :: Software Development :: Libraries :: Python Modules"], "description_content_type": "UNKNOWN", "extensions": {"python.details": {"project_urls": {"Home": "https://www.palletsprojects.org/p/werkzeug/"}, "contacts": [{"email": "armin.ronacher@active-4.com", "name": "Armin Ronacher", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst", "license": "LICENSE.txt"}}}, "license": "BSD", "metadata_version": "2.0", "name": "Werkzeug", "platform": "any", "extras": ["dev", "termcolor", "watchdog"], "run_requires": [{"requires": ["coverage", "pytest", "sphinx", "tox"], "extra": "dev"}, {"requires": ["termcolor"], "extra": "termcolor"}, {"requires": ["watchdog"], "extra": "watchdog"}], "version": "0.14.1"} \ No newline at end of file diff --git a/flask/venv/lib/python3.6/site-packages/Werkzeug-0.14.1.dist-info/top_level.txt b/flask/venv/lib/python3.6/site-packages/Werkzeug-0.14.1.dist-info/top_level.txt new file mode 100644 index 0000000..6fe8da8 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/Werkzeug-0.14.1.dist-info/top_level.txt @@ -0,0 +1 @@ +werkzeug diff --git 
a/flask/venv/lib/python3.6/site-packages/__pycache__/easy_install.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/__pycache__/easy_install.cpython-36.pyc new file mode 100644 index 0000000..519996f Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/__pycache__/easy_install.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/argh-0.26.2.dist-info/DESCRIPTION.rst b/flask/venv/lib/python3.6/site-packages/argh-0.26.2.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..db8d16c --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/argh-0.26.2.dist-info/DESCRIPTION.rst @@ -0,0 +1,272 @@ +Argh: The Natural CLI +===================== + +.. image:: https://img.shields.io/coveralls/neithere/argh.svg + :target: https://coveralls.io/r/neithere/argh + +.. image:: https://img.shields.io/travis/neithere/argh.svg + :target: https://travis-ci.org/neithere/argh + +.. image:: https://img.shields.io/pypi/format/argh.svg + :target: https://pypi.python.org/pypi/argh + +.. image:: https://img.shields.io/pypi/status/argh.svg + :target: https://pypi.python.org/pypi/argh + +.. image:: https://img.shields.io/pypi/v/argh.svg + :target: https://pypi.python.org/pypi/argh + +.. image:: https://img.shields.io/pypi/pyversions/argh.svg + :target: https://pypi.python.org/pypi/argh + +.. image:: https://img.shields.io/pypi/dd/argh.svg + :target: https://pypi.python.org/pypi/argh + +.. image:: https://readthedocs.org/projects/argh/badge/?version=stable + :target: http://argh.readthedocs.org/en/stable/ + +.. image:: https://readthedocs.org/projects/argh/badge/?version=latest + :target: http://argh.readthedocs.org/en/latest/ + +Building a command-line interface? Found yourself uttering "argh!" while +struggling with the API of `argparse`? Don't like the complexity but need +the power? + +.. epigraph:: + + Everything should be made as simple as possible, but no simpler. + + -- Albert Einstein (probably) + +`Argh` is a smart wrapper for `argparse`. `Argparse` is a very powerful tool; +`Argh` just makes it easy to use. + +In a nutshell +------------- + +`Argh`-powered applications are *simple* but *flexible*: + +:Modular: + Declaration of commands can be decoupled from assembling and dispatching; + +:Pythonic: + Commands are declared naturally, no complex API calls in most cases; + +:Reusable: + Commands are plain functions, can be used directly outside of CLI context; + +:Layered: + The complexity of code raises with requirements; + +:Transparent: + The full power of argparse is available whenever needed; + +:Namespaced: + Nested commands are a piece of cake, no messing with subparsers (though + they are of course used under the hood); + +:Term-Friendly: + Command output is processed with respect to stream encoding; + +:Unobtrusive: + `Argh` can dispatch a subset of pure-`argparse` code, and pure-`argparse` + code can update and dispatch a parser assembled with `Argh`; + +:DRY: + The amount of boilerplate code is minimal; among other things, `Argh` will: + + * infer command name from function name; + * infer arguments from function signature; + * infer argument type from the default value; + * infer argument action from the default value (for booleans); + * add an alias root command ``help`` for the ``--help`` argument. + +:NIH free: + `Argh` supports *completion*, *progress bars* and everything else by being + friendly to excellent 3rd-party libraries. No need to reinvent the wheel. + +Sounds good? Check the tutorial! 
+ +Relation to argparse +-------------------- + +`Argh` is fully compatible with `argparse`. You can mix `Argh`-agnostic and +`Argh`-aware code. Just keep in mind that the dispatcher does some extra work +that a custom dispatcher may not do. + +Installation +------------ + +Using pip:: + + $ pip install argh + +Arch Linux (AUR):: + + $ yaourt python-argh + +Examples +-------- + +A very simple application with one command: + +.. code-block:: python + + import argh + + def main(): + return 'Hello world' + + argh.dispatch_command(main) + +Run it: + +.. code-block:: bash + + $ ./app.py + Hello world + +A potentially modular application with multiple commands: + +.. code-block:: python + + import argh + + # declaring: + + def echo(text): + "Returns given word as is." + return text + + def greet(name, greeting='Hello'): + "Greets the user with given name. The greeting is customizable." + return greeting + ', ' + name + + # assembling: + + parser = argh.ArghParser() + parser.add_commands([echo, greet]) + + # dispatching: + + if __name__ == '__main__': + parser.dispatch() + +Of course it works: + +.. code-block:: bash + + $ ./app.py greet Andy + Hello, Andy + + $ ./app.py greet Andy -g Arrrgh + Arrrgh, Andy + +Here's the auto-generated help for this application (note how the docstrings +are reused):: + + $ ./app.py help + + usage: app.py {echo,greet} ... + + positional arguments: + echo Returns given word as is. + greet Greets the user with given name. The greeting is customizable. + +...and for a specific command (an ordinary function signature is converted +to CLI arguments):: + + $ ./app.py help greet + + usage: app.py greet [-g GREETING] name + + Greets the user with given name. The greeting is customizable. + + positional arguments: + name + + optional arguments: + -g GREETING, --greeting GREETING 'Hello' + +(The help messages have been simplified a bit for brevity.) + +`Argh` easily maps plain Python functions to CLI. Sometimes this is not +enough; in these cases the powerful API of `argparse` is also available: + +.. code-block:: python + + @arg('text', default='hello world', nargs='+', help='The message') + def echo(text): + print text + +The approaches can be safely combined even up to this level: + +.. code-block:: python + + # adding help to `foo` which is in the function signature: + @arg('foo', help='blah') + # these are not in the signature so they go to **kwargs: + @arg('baz') + @arg('-q', '--quux') + # the function itself: + def cmd(foo, bar=1, *args, **kwargs): + yield foo + yield bar + yield ', '.join(args) + yield kwargs['baz'] + yield kwargs['quux'] + +Links +----- + +* `Project home page`_ (GitHub) +* `Documentation`_ (Read the Docs) +* `Package distribution`_ (PyPI) +* Questions, requests, bug reports, etc.: + + * `Issue tracker`_ (GitHub) + * `Mailing list`_ (subscribe to get important announcements) + * Direct e-mail (neithere at gmail com) + +.. _project home page: http://github.com/neithere/argh/ +.. _documentation: http://argh.readthedocs.org +.. _package distribution: http://pypi.python.org/pypi/argh +.. _issue tracker: http://github.com/neithere/argh/issues/ +.. _mailing list: http://groups.google.com/group/argh-users + +Author +------ + +Developed by Andrey Mikhaylenko since 2010. + +See file `AUTHORS` for a complete list of contributors to this library. + +Support +------- + +The fastest way to improve this project is to submit tested and documented +patches or detailed bug reports. + +Otherwise you can "flattr" me: |FlattrLink|_ + +.. 
_FlattrLink: https://flattr.com/submit/auto?user_id=neithere&url=http%3A%2F%2Fpypi.python.org%2Fpypi%2Fargh +.. |FlattrLink| image:: https://api.flattr.com/button/flattr-badge-large.png + :alt: Flattr the Argh project + +Licensing +--------- + +Argh is free software: you can redistribute it and/or modify +it under the terms of the GNU Lesser General Public License as published +by the Free Software Foundation, either version 3 of the License, or +(at your option) any later version. + +Argh is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU Lesser General Public License for more details. + +You should have received a copy of the GNU Lesser General Public License +along with Argh. If not, see . + + diff --git a/flask/venv/lib/python3.6/site-packages/argh-0.26.2.dist-info/INSTALLER b/flask/venv/lib/python3.6/site-packages/argh-0.26.2.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/argh-0.26.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/flask/venv/lib/python3.6/site-packages/argh-0.26.2.dist-info/METADATA b/flask/venv/lib/python3.6/site-packages/argh-0.26.2.dist-info/METADATA new file mode 100644 index 0000000..64226d2 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/argh-0.26.2.dist-info/METADATA @@ -0,0 +1,300 @@ +Metadata-Version: 2.0 +Name: argh +Version: 0.26.2 +Summary: An unobtrusive argparse wrapper with natural syntax +Home-page: http://github.com/neithere/argh/ +Author: Andrey Mikhaylenko +Author-email: neithere@gmail.com +License: GNU Lesser General Public License (LGPL), Version 3 +Keywords: cli command line argparse optparse argument option +Platform: UNKNOWN +Classifier: Development Status :: 4 - Beta +Classifier: Environment :: Console +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Information Technology +Classifier: License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL) +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.2 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Software Development :: User Interfaces +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Provides: argh + +Argh: The Natural CLI +===================== + +.. image:: https://img.shields.io/coveralls/neithere/argh.svg + :target: https://coveralls.io/r/neithere/argh + +.. image:: https://img.shields.io/travis/neithere/argh.svg + :target: https://travis-ci.org/neithere/argh + +.. image:: https://img.shields.io/pypi/format/argh.svg + :target: https://pypi.python.org/pypi/argh + +.. image:: https://img.shields.io/pypi/status/argh.svg + :target: https://pypi.python.org/pypi/argh + +.. image:: https://img.shields.io/pypi/v/argh.svg + :target: https://pypi.python.org/pypi/argh + +.. image:: https://img.shields.io/pypi/pyversions/argh.svg + :target: https://pypi.python.org/pypi/argh + +.. image:: https://img.shields.io/pypi/dd/argh.svg + :target: https://pypi.python.org/pypi/argh + +.. 
image:: https://readthedocs.org/projects/argh/badge/?version=stable + :target: http://argh.readthedocs.org/en/stable/ + +.. image:: https://readthedocs.org/projects/argh/badge/?version=latest + :target: http://argh.readthedocs.org/en/latest/ + +Building a command-line interface? Found yourself uttering "argh!" while +struggling with the API of `argparse`? Don't like the complexity but need +the power? + +.. epigraph:: + + Everything should be made as simple as possible, but no simpler. + + -- Albert Einstein (probably) + +`Argh` is a smart wrapper for `argparse`. `Argparse` is a very powerful tool; +`Argh` just makes it easy to use. + +In a nutshell +------------- + +`Argh`-powered applications are *simple* but *flexible*: + +:Modular: + Declaration of commands can be decoupled from assembling and dispatching; + +:Pythonic: + Commands are declared naturally, no complex API calls in most cases; + +:Reusable: + Commands are plain functions, can be used directly outside of CLI context; + +:Layered: + The complexity of code raises with requirements; + +:Transparent: + The full power of argparse is available whenever needed; + +:Namespaced: + Nested commands are a piece of cake, no messing with subparsers (though + they are of course used under the hood); + +:Term-Friendly: + Command output is processed with respect to stream encoding; + +:Unobtrusive: + `Argh` can dispatch a subset of pure-`argparse` code, and pure-`argparse` + code can update and dispatch a parser assembled with `Argh`; + +:DRY: + The amount of boilerplate code is minimal; among other things, `Argh` will: + + * infer command name from function name; + * infer arguments from function signature; + * infer argument type from the default value; + * infer argument action from the default value (for booleans); + * add an alias root command ``help`` for the ``--help`` argument. + +:NIH free: + `Argh` supports *completion*, *progress bars* and everything else by being + friendly to excellent 3rd-party libraries. No need to reinvent the wheel. + +Sounds good? Check the tutorial! + +Relation to argparse +-------------------- + +`Argh` is fully compatible with `argparse`. You can mix `Argh`-agnostic and +`Argh`-aware code. Just keep in mind that the dispatcher does some extra work +that a custom dispatcher may not do. + +Installation +------------ + +Using pip:: + + $ pip install argh + +Arch Linux (AUR):: + + $ yaourt python-argh + +Examples +-------- + +A very simple application with one command: + +.. code-block:: python + + import argh + + def main(): + return 'Hello world' + + argh.dispatch_command(main) + +Run it: + +.. code-block:: bash + + $ ./app.py + Hello world + +A potentially modular application with multiple commands: + +.. code-block:: python + + import argh + + # declaring: + + def echo(text): + "Returns given word as is." + return text + + def greet(name, greeting='Hello'): + "Greets the user with given name. The greeting is customizable." + return greeting + ', ' + name + + # assembling: + + parser = argh.ArghParser() + parser.add_commands([echo, greet]) + + # dispatching: + + if __name__ == '__main__': + parser.dispatch() + +Of course it works: + +.. code-block:: bash + + $ ./app.py greet Andy + Hello, Andy + + $ ./app.py greet Andy -g Arrrgh + Arrrgh, Andy + +Here's the auto-generated help for this application (note how the docstrings +are reused):: + + $ ./app.py help + + usage: app.py {echo,greet} ... + + positional arguments: + echo Returns given word as is. + greet Greets the user with given name. 
The greeting is customizable. + +...and for a specific command (an ordinary function signature is converted +to CLI arguments):: + + $ ./app.py help greet + + usage: app.py greet [-g GREETING] name + + Greets the user with given name. The greeting is customizable. + + positional arguments: + name + + optional arguments: + -g GREETING, --greeting GREETING 'Hello' + +(The help messages have been simplified a bit for brevity.) + +`Argh` easily maps plain Python functions to CLI. Sometimes this is not +enough; in these cases the powerful API of `argparse` is also available: + +.. code-block:: python + + @arg('text', default='hello world', nargs='+', help='The message') + def echo(text): + print text + +The approaches can be safely combined even up to this level: + +.. code-block:: python + + # adding help to `foo` which is in the function signature: + @arg('foo', help='blah') + # these are not in the signature so they go to **kwargs: + @arg('baz') + @arg('-q', '--quux') + # the function itself: + def cmd(foo, bar=1, *args, **kwargs): + yield foo + yield bar + yield ', '.join(args) + yield kwargs['baz'] + yield kwargs['quux'] + +Links +----- + +* `Project home page`_ (GitHub) +* `Documentation`_ (Read the Docs) +* `Package distribution`_ (PyPI) +* Questions, requests, bug reports, etc.: + + * `Issue tracker`_ (GitHub) + * `Mailing list`_ (subscribe to get important announcements) + * Direct e-mail (neithere at gmail com) + +.. _project home page: http://github.com/neithere/argh/ +.. _documentation: http://argh.readthedocs.org +.. _package distribution: http://pypi.python.org/pypi/argh +.. _issue tracker: http://github.com/neithere/argh/issues/ +.. _mailing list: http://groups.google.com/group/argh-users + +Author +------ + +Developed by Andrey Mikhaylenko since 2010. + +See file `AUTHORS` for a complete list of contributors to this library. + +Support +------- + +The fastest way to improve this project is to submit tested and documented +patches or detailed bug reports. + +Otherwise you can "flattr" me: |FlattrLink|_ + +.. _FlattrLink: https://flattr.com/submit/auto?user_id=neithere&url=http%3A%2F%2Fpypi.python.org%2Fpypi%2Fargh +.. |FlattrLink| image:: https://api.flattr.com/button/flattr-badge-large.png + :alt: Flattr the Argh project + +Licensing +--------- + +Argh is free software: you can redistribute it and/or modify +it under the terms of the GNU Lesser General Public License as published +by the Free Software Foundation, either version 3 of the License, or +(at your option) any later version. + +Argh is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU Lesser General Public License for more details. + +You should have received a copy of the GNU Lesser General Public License +along with Argh. If not, see . 
+ + diff --git a/flask/venv/lib/python3.6/site-packages/argh-0.26.2.dist-info/RECORD b/flask/venv/lib/python3.6/site-packages/argh-0.26.2.dist-info/RECORD new file mode 100644 index 0000000..df0f5ca --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/argh-0.26.2.dist-info/RECORD @@ -0,0 +1,31 @@ +argh-0.26.2.dist-info/DESCRIPTION.rst,sha256=lkRPNeZEMGOKCCfCGZ4_-2iPxLvQ3xyccMH7SUfEJ5Q,7304 +argh-0.26.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +argh-0.26.2.dist-info/METADATA,sha256=opzH3aI3sMQFcRmdmcrFR14tZfX6eJYsY6fi9HhivYI,8557 +argh-0.26.2.dist-info/RECORD,, +argh-0.26.2.dist-info/WHEEL,sha256=o2k-Qa-RMNIJmUdIc7KU6VWR_ErNRbWNlxDIpl7lm34,110 +argh-0.26.2.dist-info/metadata.json,sha256=pbgTgagWaqB_H3oMqcB3xmA4mln1QBf-aAh-iGLHIUA,1408 +argh-0.26.2.dist-info/top_level.txt,sha256=KbRbUYCNA1IhMbo789mEq1eXr9-lhdgo1K6Ww_p7zM0,5 +argh/__init__.py,sha256=GemyWFY_3uaAOGEzxWtvU6trRx_s9Z5OaC7tS1Xaxn0,495 +argh/__pycache__/__init__.cpython-36.pyc,, +argh/__pycache__/assembling.cpython-36.pyc,, +argh/__pycache__/compat.cpython-36.pyc,, +argh/__pycache__/completion.cpython-36.pyc,, +argh/__pycache__/constants.cpython-36.pyc,, +argh/__pycache__/decorators.cpython-36.pyc,, +argh/__pycache__/dispatching.cpython-36.pyc,, +argh/__pycache__/exceptions.cpython-36.pyc,, +argh/__pycache__/helpers.cpython-36.pyc,, +argh/__pycache__/interaction.cpython-36.pyc,, +argh/__pycache__/io.cpython-36.pyc,, +argh/__pycache__/utils.cpython-36.pyc,, +argh/assembling.py,sha256=rHn8Qh5hKk4NhEgqA0NECja4ZnxkHM4ywMqKE5eJEVw,17555 +argh/compat.py,sha256=DThKad-IYxH-vz3DQDdObHOOwCOWvJH4H2rM4Iq_yZs,2834 +argh/completion.py,sha256=oxj9vfyI1WATZRnlwcc7m__-uwM3-Eqdi-WzR3wnMus,2943 +argh/constants.py,sha256=FpUSsGACMlGgZZuaqa79BwVROojE5ABJ2OQgkXS2e6A,3436 +argh/decorators.py,sha256=qAdU2Y2vQdIshrABdJoliNMSSOjetZN4EpkpZ3sQ0AM,6141 +argh/dispatching.py,sha256=5Bg22bf3MJZrvv3B4CtcLu_Nuq_RLLyBxkl4Vlvl4nw,12465 +argh/exceptions.py,sha256=x_UJbpOkdm-qfOdp1A4ERvPB0JkqL74GXTlXwqu6E-I,1545 +argh/helpers.py,sha256=SKAB5AQrmsXczTmL9QkzBJVlWE7zAGqwxhYvpU1menQ,2151 +argh/interaction.py,sha256=XKlTPo0lru_VYd2Jppm-LlZoW8oDVvSgqOyjPjsdzVM,2403 +argh/io.py,sha256=nfnyC53E_KYXJsCg3fEiNqHBqs-e2TC-x1KrkWVI3Vw,2986 +argh/utils.py,sha256=4yI6-_Q33m4vrthwVpFI3ZlIWGZ_bx7Tff7BGuEB1E4,1676 diff --git a/flask/venv/lib/python3.6/site-packages/argh-0.26.2.dist-info/WHEEL b/flask/venv/lib/python3.6/site-packages/argh-0.26.2.dist-info/WHEEL new file mode 100644 index 0000000..8b6dd1b --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/argh-0.26.2.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.29.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/flask/venv/lib/python3.6/site-packages/argh-0.26.2.dist-info/metadata.json b/flask/venv/lib/python3.6/site-packages/argh-0.26.2.dist-info/metadata.json new file mode 100644 index 0000000..4b6e876 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/argh-0.26.2.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Development Status :: 4 - Beta", "Environment :: Console", "Intended Audience :: Developers", "Intended Audience :: Information Technology", "License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.2", "Programming Language :: Python :: 3.4", 
"Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Software Development :: User Interfaces", "Topic :: Software Development :: Libraries :: Python Modules"], "extensions": {"python.details": {"contacts": [{"email": "neithere@gmail.com", "name": "Andrey Mikhaylenko", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "http://github.com/neithere/argh/"}}}, "generator": "bdist_wheel (0.29.0)", "keywords": ["cli", "command", "line", "argparse", "optparse", "argument", "option"], "license": "GNU Lesser General Public License (LGPL), Version 3", "metadata_version": "2.0", "name": "argh", "provides": "argh", "summary": "An unobtrusive argparse wrapper with natural syntax", "test_requires": [{"requires": ["iocapture", "mock", "pytest"]}], "version": "0.26.2"} \ No newline at end of file diff --git a/flask/venv/lib/python3.6/site-packages/argh-0.26.2.dist-info/top_level.txt b/flask/venv/lib/python3.6/site-packages/argh-0.26.2.dist-info/top_level.txt new file mode 100644 index 0000000..e795266 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/argh-0.26.2.dist-info/top_level.txt @@ -0,0 +1 @@ +argh diff --git a/flask/venv/lib/python3.6/site-packages/argh/__init__.py b/flask/venv/lib/python3.6/site-packages/argh/__init__.py new file mode 100644 index 0000000..481f1b4 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/argh/__init__.py @@ -0,0 +1,19 @@ +# coding: utf-8 +# +# Copyright © 2010—2014 Andrey Mikhaylenko and contributors +# +# This file is part of Argh. +# +# Argh is free software under terms of the GNU Lesser +# General Public License version 3 (LGPLv3) as published by the Free +# Software Foundation. See the file README.rst for copying conditions. 
+# +from .assembling import * +from .decorators import * +from .dispatching import * +from .exceptions import * +from .interaction import * +from .helpers import * + + +__version__ = '0.26.2' diff --git a/flask/venv/lib/python3.6/site-packages/argh/__pycache__/__init__.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/argh/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..baf99df Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/argh/__pycache__/__init__.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/argh/__pycache__/assembling.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/argh/__pycache__/assembling.cpython-36.pyc new file mode 100644 index 0000000..3801a49 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/argh/__pycache__/assembling.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/argh/__pycache__/compat.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/argh/__pycache__/compat.cpython-36.pyc new file mode 100644 index 0000000..88f2218 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/argh/__pycache__/compat.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/argh/__pycache__/completion.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/argh/__pycache__/completion.cpython-36.pyc new file mode 100644 index 0000000..ac6609d Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/argh/__pycache__/completion.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/argh/__pycache__/constants.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/argh/__pycache__/constants.cpython-36.pyc new file mode 100644 index 0000000..4df77f0 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/argh/__pycache__/constants.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/argh/__pycache__/decorators.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/argh/__pycache__/decorators.cpython-36.pyc new file mode 100644 index 0000000..55015b1 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/argh/__pycache__/decorators.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/argh/__pycache__/dispatching.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/argh/__pycache__/dispatching.cpython-36.pyc new file mode 100644 index 0000000..4676f6d Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/argh/__pycache__/dispatching.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/argh/__pycache__/exceptions.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/argh/__pycache__/exceptions.cpython-36.pyc new file mode 100644 index 0000000..bbba6fd Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/argh/__pycache__/exceptions.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/argh/__pycache__/helpers.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/argh/__pycache__/helpers.cpython-36.pyc new file mode 100644 index 0000000..3ec1d22 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/argh/__pycache__/helpers.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/argh/__pycache__/interaction.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/argh/__pycache__/interaction.cpython-36.pyc new file mode 100644 index 0000000..69fd6f5 Binary files /dev/null and 
b/flask/venv/lib/python3.6/site-packages/argh/__pycache__/interaction.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/argh/__pycache__/io.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/argh/__pycache__/io.cpython-36.pyc new file mode 100644 index 0000000..73357b9 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/argh/__pycache__/io.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/argh/__pycache__/utils.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/argh/__pycache__/utils.cpython-36.pyc new file mode 100644 index 0000000..373cf68 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/argh/__pycache__/utils.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/argh/assembling.py b/flask/venv/lib/python3.6/site-packages/argh/assembling.py new file mode 100644 index 0000000..afe7ba7 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/argh/assembling.py @@ -0,0 +1,501 @@ +# coding: utf-8 +# +# Copyright © 2010—2014 Andrey Mikhaylenko and contributors +# +# This file is part of Argh. +# +# Argh is free software under terms of the GNU Lesser +# General Public License version 3 (LGPLv3) as published by the Free +# Software Foundation. See the file README.rst for copying conditions. +# +""" +Assembling +~~~~~~~~~~ + +Functions and classes to properly assemble your commands in a parser. +""" +import argparse +import sys +import warnings + +from argh.completion import COMPLETION_ENABLED +from argh.compat import OrderedDict +from argh.constants import ( + ATTR_ALIASES, + ATTR_ARGS, + ATTR_NAME, + ATTR_EXPECTS_NAMESPACE_OBJECT, + PARSER_FORMATTER, + DEFAULT_ARGUMENT_TEMPLATE, + DEST_FUNCTION, +) +from argh.utils import get_subparsers, get_arg_spec +from argh.exceptions import AssemblingError + + +__all__ = [ + 'SUPPORTS_ALIASES', + 'set_default_command', + 'add_commands', + 'add_subcommands', +] + + +def _check_support_aliases(): + p = argparse.ArgumentParser() + s = p.add_subparsers() + try: + s.add_parser('x', aliases=[]) + except TypeError: + return False + else: + return True + + +SUPPORTS_ALIASES = _check_support_aliases() +""" +Calculated on load. If `True`, current version of argparse supports +alternative command names (can be set via :func:`~argh.decorators.aliases`). +""" + + +def _get_args_from_signature(function): + if getattr(function, ATTR_EXPECTS_NAMESPACE_OBJECT, False): + return + + spec = get_arg_spec(function) + + defaults = dict(zip(*[reversed(x) for x in (spec.args, + spec.defaults or [])])) + defaults.update(getattr(spec, 'kwonlydefaults', None) or {}) + + kwonly = getattr(spec, 'kwonlyargs', []) + + if sys.version_info < (3,0): + annotations = {} + else: + annotations = dict((k,v) for k,v in function.__annotations__.items() + if isinstance(v, str)) + + # define the list of conflicting option strings + # (short forms, i.e. 
single-character ones) + chars = [a[0] for a in spec.args + kwonly] + char_counts = dict((char, chars.count(char)) for char in set(chars)) + conflicting_opts = tuple(char for char in char_counts + if 1 < char_counts[char]) + + for name in spec.args + kwonly: + flags = [] # name_or_flags + akwargs = {} # keyword arguments for add_argument() + + if name in annotations: + # help message: func(a : "b") -> add_argument("a", help="b") + akwargs.update(help=annotations.get(name)) + + if name in defaults or name in kwonly: + if name in defaults: + akwargs.update(default=defaults.get(name)) + else: + akwargs.update(required=True) + flags = ('-{0}'.format(name[0]), '--{0}'.format(name)) + if name.startswith(conflicting_opts): + # remove short name + flags = flags[1:] + + else: + # positional argument + flags = (name,) + + # cmd(foo_bar) -> add_argument('foo-bar') + flags = tuple(x.replace('_', '-') for x in flags) + + yield dict(option_strings=flags, **akwargs) + + if spec.varargs: + # *args + yield dict(option_strings=[spec.varargs], nargs='*') + + +def _guess(kwargs): + """ + Adds types, actions, etc. to given argument specification. + For example, ``default=3`` implies ``type=int``. + + :param arg: a :class:`argh.utils.Arg` instance + """ + guessed = {} + + # Parser actions that accept argument 'type' + TYPE_AWARE_ACTIONS = 'store', 'append' + + # guess type/action from default value + value = kwargs.get('default') + if value is not None: + if isinstance(value, bool): + if kwargs.get('action') is None: + # infer action from default value + guessed['action'] = 'store_false' if value else 'store_true' + elif kwargs.get('type') is None: + # infer type from default value + # (make sure that action handler supports this keyword) + if kwargs.get('action', 'store') in TYPE_AWARE_ACTIONS: + guessed['type'] = type(value) + + # guess type from choices (first item) + if kwargs.get('choices') and 'type' not in list(guessed) + list(kwargs): + guessed['type'] = type(kwargs['choices'][0]) + + return dict(kwargs, **guessed) + + +def _is_positional(args, prefix_chars='-'): + assert args + if 1 < len(args) or args[0][0].startswith(tuple(prefix_chars)): + return False + else: + return True + + +def _get_parser_param_kwargs(parser, argspec): + argspec = argspec.copy() # parser methods modify source data + args = argspec['option_strings'] + + if _is_positional(args, prefix_chars=parser.prefix_chars): + get_kwargs = parser._get_positional_kwargs + else: + get_kwargs = parser._get_optional_kwargs + + kwargs = get_kwargs(*args, **argspec) + + kwargs['dest'] = kwargs['dest'].replace('-', '_') + + return kwargs + + +def _get_dest(parser, argspec): + kwargs = _get_parser_param_kwargs(parser, argspec) + return kwargs['dest'] + + +def _require_support_for_default_command_with_subparsers(): + if sys.version_info < (3,4): + raise AssemblingError( + 'Argparse library bundled with this version of Python ' + 'does not support combining a default command with nested ones.') + + +def set_default_command(parser, function): + """ + Sets default command (i.e. a function) for given parser. + + If `parser.description` is empty and the function has a docstring, + it is used as the description. + + .. note:: + + An attempt to set default command to a parser which already has + subparsers (e.g. added with :func:`~argh.assembling.add_commands`) + results in a `AssemblingError`. + + .. note:: + + If there are both explicitly declared arguments (e.g. via + :func:`~argh.decorators.arg`) and ones inferred from the function + signature (e.g. 
via :func:`~argh.decorators.command`), declared ones + will be merged into inferred ones. If an argument does not conform + function signature, `AssemblingError` is raised. + + .. note:: + + If the parser was created with ``add_help=True`` (which is by default), + option name ``-h`` is silently removed from any argument. + + """ + if parser._subparsers: + _require_support_for_default_command_with_subparsers() + + spec = get_arg_spec(function) + + declared_args = getattr(function, ATTR_ARGS, []) + inferred_args = list(_get_args_from_signature(function)) + + if inferred_args and declared_args: + # We've got a mixture of declared and inferred arguments + + # a mapping of "dest" strings to argument declarations. + # + # * a "dest" string is a normalized form of argument name, i.e.: + # + # '-f', '--foo' → 'foo' + # 'foo-bar' → 'foo_bar' + # + # * argument declaration is a dictionary representing an argument; + # it is obtained either from _get_args_from_signature() or from + # an @arg decorator (as is). + # + dests = OrderedDict() + + for argspec in inferred_args: + dest = _get_parser_param_kwargs(parser, argspec)['dest'] + dests[dest] = argspec + + for declared_kw in declared_args: + # an argument is declared via decorator + dest = _get_dest(parser, declared_kw) + if dest in dests: + # the argument is already known from function signature + # + # now make sure that this declared arg conforms to the function + # signature and therefore only refines an inferred arg: + # + # @arg('my-foo') maps to func(my_foo) + # @arg('--my-bar') maps to func(my_bar=...) + + # either both arguments are positional or both are optional + decl_positional = _is_positional(declared_kw['option_strings']) + infr_positional = _is_positional(dests[dest]['option_strings']) + if decl_positional != infr_positional: + kinds = {True: 'positional', False: 'optional'} + raise AssemblingError( + '{func}: argument "{dest}" declared as {kind_i} ' + '(in function signature) and {kind_d} (via decorator)' + .format( + func=function.__name__, + dest=dest, + kind_i=kinds[infr_positional], + kind_d=kinds[decl_positional], + )) + + # merge explicit argument declaration into the inferred one + # (e.g. `help=...`) + dests[dest].update(**declared_kw) + else: + # the argument is not in function signature + varkw = getattr(spec, 'varkw', getattr(spec, 'keywords', [])) + if varkw: + # function accepts **kwargs; the argument goes into it + dests[dest] = declared_kw + else: + # there's no way we can map the argument declaration + # to function signature + xs = (dests[x]['option_strings'] for x in dests) + raise AssemblingError( + '{func}: argument {flags} does not fit ' + 'function signature: {sig}'.format( + flags=', '.join(declared_kw['option_strings']), + func=function.__name__, + sig=', '.join('/'.join(x) for x in xs))) + + # pack the modified data back into a list + inferred_args = dests.values() + + command_args = inferred_args or declared_args + + # add types, actions, etc. (e.g. 
default=3 implies type=int) + command_args = [_guess(x) for x in command_args] + + for draft in command_args: + draft = draft.copy() + if 'help' not in draft: + draft.update(help=DEFAULT_ARGUMENT_TEMPLATE) + dest_or_opt_strings = draft.pop('option_strings') + if parser.add_help and '-h' in dest_or_opt_strings: + dest_or_opt_strings = [x for x in dest_or_opt_strings if x != '-h'] + completer = draft.pop('completer', None) + try: + action = parser.add_argument(*dest_or_opt_strings, **draft) + if COMPLETION_ENABLED and completer: + action.completer = completer + except Exception as e: + raise type(e)('{func}: cannot add arg {args}: {msg}'.format( + args='/'.join(dest_or_opt_strings), func=function.__name__, msg=e)) + + if function.__doc__ and not parser.description: + parser.description = function.__doc__ + parser.set_defaults(**{ + DEST_FUNCTION: function, + }) + + +def add_commands(parser, functions, namespace=None, namespace_kwargs=None, + func_kwargs=None, + # deprecated args: + title=None, description=None, help=None): + """ + Adds given functions as commands to given parser. + + :param parser: + + an :class:`argparse.ArgumentParser` instance. + + :param functions: + + a list of functions. A subparser is created for each of them. + If the function is decorated with :func:`~argh.decorators.arg`, the + arguments are passed to :class:`argparse.ArgumentParser.add_argument`. + See also :func:`~argh.dispatching.dispatch` for requirements + concerning function signatures. The command name is inferred from the + function name. Note that the underscores in the name are replaced with + hyphens, i.e. function name "foo_bar" becomes command name "foo-bar". + + :param namespace: + + an optional string representing the group of commands. For example, if + a command named "hello" is added without the namespace, it will be + available as "prog.py hello"; if the namespace if specified as "greet", + then the command will be accessible as "prog.py greet hello". The + namespace itself is not callable, so "prog.py greet" will fail and only + display a help message. + + :param func_kwargs: + + a `dict` of keyword arguments to be passed to each nested ArgumentParser + instance created per command (i.e. per function). Members of this + dictionary have the highest priority, so a function's docstring is + overridden by a `help` in `func_kwargs` (if present). + + :param namespace_kwargs: + + a `dict` of keyword arguments to be passed to the nested ArgumentParser + instance under given `namespace`. + + Deprecated params that should be moved into `namespace_kwargs`: + + :param title: + + passed to :meth:`argparse.ArgumentParser.add_subparsers` as `title`. + + .. deprecated:: 0.26.0 + + Please use `namespace_kwargs` instead. + + :param description: + + passed to :meth:`argparse.ArgumentParser.add_subparsers` as + `description`. + + .. deprecated:: 0.26.0 + + Please use `namespace_kwargs` instead. + + :param help: + + passed to :meth:`argparse.ArgumentParser.add_subparsers` as `help`. + + .. deprecated:: 0.26.0 + + Please use `namespace_kwargs` instead. + + .. note:: + + This function modifies the parser object. Generally side effects are + bad practice but we don't seem to have any choice as ArgumentParser is + pretty opaque. + You may prefer :class:`~argh.helpers.ArghParser.add_commands` for a bit + more predictable API. + + .. note:: + + An attempt to add commands to a parser which already has a default + function (e.g. added with :func:`~argh.assembling.set_default_command`) + results in `AssemblingError`. 
+ + """ + # FIXME "namespace" is a correct name but it clashes with the "namespace" + # that represents arguments (argparse.Namespace and our ArghNamespace). + # We should rename the argument here. + + if DEST_FUNCTION in parser._defaults: + _require_support_for_default_command_with_subparsers() + + namespace_kwargs = namespace_kwargs or {} + + # FIXME remove this by 1.0 + # + if title: + warnings.warn('argument `title` is deprecated in add_commands(),' + ' use `parser_kwargs` instead', DeprecationWarning) + namespace_kwargs['description'] = title + if help: + warnings.warn('argument `help` is deprecated in add_commands(),' + ' use `parser_kwargs` instead', DeprecationWarning) + namespace_kwargs['help'] = help + if description: + warnings.warn('argument `description` is deprecated in add_commands(),' + ' use `parser_kwargs` instead', DeprecationWarning) + namespace_kwargs['description'] = description + # + # / + + subparsers_action = get_subparsers(parser, create=True) + + if namespace: + # Make a nested parser and init a deeper _SubParsersAction under it. + + # Create a named group of commands. It will be listed along with + # root-level commands in ``app.py --help``; in that context its `title` + # can be used as a short description on the right side of its name. + # Normally `title` is shown above the list of commands + # in ``app.py my-namespace --help``. + subsubparser_kw = { + 'help': namespace_kwargs.get('title'), + } + subsubparser = subparsers_action.add_parser(namespace, **subsubparser_kw) + subparsers_action = subsubparser.add_subparsers(**namespace_kwargs) + else: + assert not namespace_kwargs, ('`parser_kwargs` only makes sense ' + 'with `namespace`.') + + for func in functions: + cmd_name, func_parser_kwargs = _extract_command_meta_from_func(func) + + # override any computed kwargs by manually supplied ones + if func_kwargs: + func_parser_kwargs.update(func_kwargs) + + # create and set up the parser for this command + command_parser = subparsers_action.add_parser(cmd_name, **func_parser_kwargs) + set_default_command(command_parser, func) + + +def _extract_command_meta_from_func(func): + # use explicitly defined name; if none, use function name (a_b → a-b) + cmd_name = getattr(func, ATTR_NAME, + func.__name__.replace('_','-')) + + func_parser_kwargs = { + + # add command help from function's docstring + 'help': func.__doc__, + + # set default formatter + 'formatter_class': PARSER_FORMATTER, + + } + + # try adding aliases for command name + if SUPPORTS_ALIASES: + func_parser_kwargs['aliases'] = getattr(func, ATTR_ALIASES, []) + + return cmd_name, func_parser_kwargs + + +def add_subcommands(parser, namespace, functions, **namespace_kwargs): + """ + A wrapper for :func:`add_commands`. 
+ + These examples are equivalent:: + + add_commands(parser, [get, put], namespace='db', + namespace_kwargs={ + 'title': 'database commands', + 'help': 'CRUD for our silly database' + }) + + add_subcommands(parser, 'db', [get, put], + title='database commands', + help='CRUD for our silly database') + + """ + add_commands(parser, functions, namespace=namespace, + namespace_kwargs=namespace_kwargs) diff --git a/flask/venv/lib/python3.6/site-packages/argh/compat.py b/flask/venv/lib/python3.6/site-packages/argh/compat.py new file mode 100644 index 0000000..58c6a57 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/argh/compat.py @@ -0,0 +1,92 @@ +# originally inspired by "six" by Benjamin Peterson + +import inspect +import sys + + +if sys.version_info < (3,0): + text_type = unicode + binary_type = str + + import StringIO + StringIO = BytesIO = StringIO.StringIO +else: + text_type = str + binary_type = bytes + + import io + StringIO = io.StringIO + BytesIO = io.BytesIO + + +def getargspec_permissive(func): + """ + An `inspect.getargspec` with a relaxed sanity check to support Cython. + + Motivation: + + A Cython-compiled function is *not* an instance of Python's + types.FunctionType. That is the sanity check the standard Py2 + library uses in `inspect.getargspec()`. So, an exception is raised + when calling `argh.dispatch_command(cythonCompiledFunc)`. However, + the CyFunctions do have perfectly usable `.func_code` and + `.func_defaults` which is all `inspect.getargspec` needs. + + This function just copies `inspect.getargspec()` from the standard + library but relaxes the test to a more duck-typing one of having + both `.func_code` and `.func_defaults` attributes. + """ + if inspect.ismethod(func): + func = func.im_func + + # Py2 Stdlib uses isfunction(func) which is too strict for Cython-compiled + # functions though such have perfectly usable func_code, func_defaults. + if not (hasattr(func, "func_code") and hasattr(func, "func_defaults")): + raise TypeError('{!r} missing func_code or func_defaults'.format(func)) + + args, varargs, varkw = inspect.getargs(func.func_code) + return inspect.ArgSpec(args, varargs, varkw, func.func_defaults) + + +if sys.version_info < (3,0): + getargspec = getargspec_permissive +else: + # in Python 3 the basic getargspec doesn't support keyword-only arguments + # and annotations and raises ValueError if they are discovered + getargspec = inspect.getfullargspec + + +class _PrimitiveOrderedDict(dict): + """ + A poor man's OrderedDict replacement for compatibility with Python 2.6. + Implements only the basic features. May easily break if non-overloaded + methods are used. 
+ """ + def __init__(self, *args, **kwargs): + super(_PrimitiveOrderedDict, self).__init__(*args, **kwargs) + self._seq = [] + + def __setitem__(self, key, value): + super(_PrimitiveOrderedDict, self).__setitem__(key, value) + if key not in self._seq: + self._seq.append(key) + + def __delitem__(self, key): + super(_PrimitiveOrderedDict, self).__delitem__(key) + idx = self._seq.index(key) + del self._seq[idx] + + def __iter__(self): + return iter(self._seq) + + def keys(self): + return list(self) + + def values(self): + return [self[k] for k in self] + + +try: + from collections import OrderedDict +except ImportError: + OrderedDict = _PrimitiveOrderedDict diff --git a/flask/venv/lib/python3.6/site-packages/argh/completion.py b/flask/venv/lib/python3.6/site-packages/argh/completion.py new file mode 100644 index 0000000..01b0818 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/argh/completion.py @@ -0,0 +1,94 @@ +# coding: utf-8 +# +# Copyright © 2010—2014 Andrey Mikhaylenko and contributors +# +# This file is part of Argh. +# +# Argh is free software under terms of the GNU Lesser +# General Public License version 3 (LGPLv3) as published by the Free +# Software Foundation. See the file README.rst for copying conditions. +# +""" +Shell completion +~~~~~~~~~~~~~~~~ + +Command and argument completion is a great way to reduce the number of +keystrokes and improve user experience. + +To display suggestions when you press :kbd:`tab`, a shell must obtain choices +from your program. It calls the program in a specific environment and expects +it to return a list of relevant choices. + +`Argparse` does not support completion out of the box. However, there are +3rd-party apps that do the job, such as argcomplete_ and +python-selfcompletion_. + +`Argh` supports only argcomplete_ which doesn't require subclassing +the parser and monkey-patches it instead. Combining `Argh` +with python-selfcompletion_ isn't much harder though: simply use +`SelfCompletingArgumentParser` instead of vanilla `ArgumentParser`. + +See installation details and gotchas in the documentation of the 3rd-party app +you've chosen for the completion backend. + +`Argh` automatically enables completion if argcomplete_ is available +(see :attr:`COMPLETION_ENABLED`). If completion is undesirable in given app by +design, it can be turned off by setting ``completion=False`` +in :func:`argh.dispatching.dispatch`. + +Note that you don't *have* to add completion via `Argh`; it doesn't matter +whether you let it do it for you or use the underlying API. + +.. _argcomplete: https://github.com/kislyuk/argcomplete +.. _python-selfcompletion: https://github.com/dbarnett/python-selfcompletion + +Argument-level completion +------------------------- + +Argcomplete_ supports custom "completers". The documentation suggests adding +the completer as an attribute of the argument parser action:: + + parser.add_argument("--env-var1").completer = EnvironCompleter + +However, this doesn't fit the normal `Argh`-assisted workflow. +It is recommended to use the :func:`~argh.decorators.arg` decorator:: + + @arg('--env-var1', completer=EnvironCompleter) + def func(...): + ... + +""" +import logging +import os + + +COMPLETION_ENABLED = False +""" +Dynamically set to `True` on load if argcomplete_ was successfully imported. 
+""" + +try: + import argcomplete +except ImportError: + pass +else: + COMPLETION_ENABLED = True + + +__all__ = ['autocomplete', 'COMPLETION_ENABLED'] + + +logger = logging.getLogger(__package__) + + +def autocomplete(parser): + """ + Adds support for shell completion via argcomplete_ by patching given + `argparse.ArgumentParser` (sub)class. + + If completion is not enabled, logs a debug-level message. + """ + if COMPLETION_ENABLED: + argcomplete.autocomplete(parser) + elif 'bash' in os.getenv('SHELL', ''): + logger.debug('Bash completion not available. Install argcomplete.') diff --git a/flask/venv/lib/python3.6/site-packages/argh/constants.py b/flask/venv/lib/python3.6/site-packages/argh/constants.py new file mode 100644 index 0000000..f295944 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/argh/constants.py @@ -0,0 +1,103 @@ +# coding: utf-8 +# +# Copyright © 2010—2014 Andrey Mikhaylenko and contributors +# +# This file is part of Argh. +# +# Argh is free software under terms of the GNU Lesser +# General Public License version 3 (LGPLv3) as published by the Free +# Software Foundation. See the file README.rst for copying conditions. +# +import argparse + +__all__ = ( + 'ATTR_NAME', 'ATTR_ALIASES', 'ATTR_ARGS', 'ATTR_WRAPPED_EXCEPTIONS', + 'ATTR_WRAPPED_EXCEPTIONS_PROCESSOR', 'ATTR_EXPECTS_NAMESPACE_OBJECT', + 'PARSER_FORMATTER', 'DEFAULT_ARGUMENT_TEMPLATE', 'DEST_FUNCTION', +) + + +# +# Names of function attributes where Argh stores command behaviour +# + +#: explicit command name (differing from function name) +ATTR_NAME = 'argh_name' + +#: alternative command names +ATTR_ALIASES = 'argh_aliases' + +#: declared arguments +ATTR_ARGS = 'argh_args' + +#: list of exception classes that should be wrapped and printed as results +ATTR_WRAPPED_EXCEPTIONS = 'argh_wrap_errors' + +#: a function to preprocess the exception object when it is wrapped +ATTR_WRAPPED_EXCEPTIONS_PROCESSOR = 'argh_wrap_errors_processor' + +#: forcing argparse.Namespace object instead of signature introspection +ATTR_EXPECTS_NAMESPACE_OBJECT = 'argh_expects_namespace_object' + +# +# Dest names in parser defaults +# + +#: dest name for a function mapped to given endpoint (goes to Namespace obj) +DEST_FUNCTION = 'function' + +# +# Other library-wide stuff +# + +class CustomFormatter(argparse.ArgumentDefaultsHelpFormatter, + argparse.RawDescriptionHelpFormatter): + def _expand_help(self, action): + """ + This method is copied verbatim from ArgumentDefaultsHelpFormatter with + a couple of lines added just before the end. Reason: we need to + `repr()` default values instead of simply inserting them as is. + This helps notice, for example, an empty string as the default value; + moreover, it prevents breaking argparse due to logical quirks inside + of its formatters. + + Ideally this could be achieved by simply defining + :attr:`DEFAULT_ARGUMENT_TEMPLATE` as ``{default!r}`` but unfortunately + argparse only supports the old printf syntax. + """ + params = dict(vars(action), prog=self._prog) + for name in list(params): + if params[name] is argparse.SUPPRESS: + del params[name] + for name in list(params): + if hasattr(params[name], '__name__'): + params[name] = params[name].__name__ + if params.get('choices') is not None: + choices_str = ', '.join([str(c) for c in params['choices']]) + params['choices'] = choices_str + + # XXX this is added in Argh vs. 
argparse.ArgumentDefaultsHelpFormatter + # (avoiding empty strings, otherwise Argparse would die with + # an IndexError in _format_action) + # + if 'default' in params: + if params['default'] is None: + params['default'] = '-' + else: + params['default'] = repr(params['default']) + # + # / + + return self._get_help_string(action) % params + + +#: Default formatter to be used in implicitly instantiated ArgumentParser. +PARSER_FORMATTER = CustomFormatter + + +DEFAULT_ARGUMENT_TEMPLATE = '%(default)s' +""" +Default template of argument help message (see issue #64). +The template ``%(default)s`` is used by `argparse` to display the argument's +default value. +""" diff --git a/flask/venv/lib/python3.6/site-packages/argh/decorators.py b/flask/venv/lib/python3.6/site-packages/argh/decorators.py new file mode 100644 index 0000000..da176e6 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/argh/decorators.py @@ -0,0 +1,195 @@ +# coding: utf-8 +# +# Copyright © 2010—2014 Andrey Mikhaylenko and contributors +# +# This file is part of Argh. +# +# Argh is free software under terms of the GNU Lesser +# General Public License version 3 (LGPLv3) as published by the Free +# Software Foundation. See the file README.rst for copying conditions. +# +""" +Command decorators +~~~~~~~~~~~~~~~~~~ +""" +from argh.constants import (ATTR_ALIASES, ATTR_ARGS, ATTR_NAME, + ATTR_WRAPPED_EXCEPTIONS, + ATTR_WRAPPED_EXCEPTIONS_PROCESSOR, + ATTR_EXPECTS_NAMESPACE_OBJECT) + + +__all__ = ['aliases', 'named', 'arg', 'wrap_errors', 'expects_obj'] + + +def named(new_name): + """ + Sets given string as command name instead of the function name. + The string is used verbatim without further processing. + + Usage:: + + @named('load') + def do_load_some_stuff_and_keep_the_original_function_name(args): + ... + + The resulting command will be available only as ``load``. To add aliases + without renaming the command, check :func:`aliases`. + + .. versionadded:: 0.19 + """ + def wrapper(func): + setattr(func, ATTR_NAME, new_name) + return func + return wrapper + + +def aliases(*names): + """ + Defines alternative command name(s) for given function (along with its + original name). Usage:: + + @aliases('co', 'check') + def checkout(args): + ... + + The resulting command will be available as ``checkout``, ``check`` and ``co``. + + .. note:: + + This decorator only works with a recent version of argparse (see `Python + issue 9324`_ and `Python rev 4c0426`_). Such version ships with + **Python 3.2+** and may be available in other environments as a separate + package. Argh does not issue warnings and simply ignores aliases if + they are not supported. See :attr:`~argh.assembling.SUPPORTS_ALIASES`. + + .. _Python issue 9324: http://bugs.python.org/issue9324 + .. _Python rev 4c0426: http://hg.python.org/cpython/rev/4c0426261148/ + + .. versionadded:: 0.19 + """ + def wrapper(func): + setattr(func, ATTR_ALIASES, names) + return func + return wrapper + + +def arg(*args, **kwargs): + """ + Declares an argument for given function. Does not register the function + anywhere, nor does it modify the function in any way. + + The signature of the decorator matches that of + :meth:`argparse.ArgumentParser.add_argument`, only some keywords are not + required if they can be easily guessed (e.g. you don't have to specify type + or action when an `int` or `bool` default value is supplied). 
+ + Typical use cases: + + - In combination with :func:`expects_obj` (which is not recommended); + - in combination with ordinary function signatures to add details that + cannot be expressed with that syntax (e.g. help message). + + Usage:: + + from argh import arg + + @arg('path', help='path to the file to load') + @arg('--format', choices=['yaml','json']) + @arg('-v', '--verbosity', choices=range(0,3), default=2) + def load(path, something=None, format='json', dry_run=False, verbosity=1): + loaders = {'json': json.load, 'yaml': yaml.load} + loader = loaders[args.format] + data = loader(args.path) + if not args.dry_run: + if verbosity < 1: + print('saving to the database') + put_to_database(data) + + In this example: + + - `path` declaration is extended with `help`; + - `format` declaration is extended with `choices`; + - `dry_run` declaration is not duplicated; + - `verbosity` is extended with `choices` and the default value is + overridden. (If both function signature and `@arg` define a default + value for an argument, `@arg` wins.) + + .. note:: + + It is recommended to avoid using this decorator unless there's no way + to tune the argument's behaviour or presentation using ordinary + function signatures. Readability counts, don't repeat yourself. + + """ + def wrapper(func): + declared_args = getattr(func, ATTR_ARGS, []) + # The innermost decorator is called first but appears last in the code. + # We need to preserve the expected order of positional arguments, so + # the outermost decorator inserts its value before the innermost's: + declared_args.insert(0, dict(option_strings=args, **kwargs)) + setattr(func, ATTR_ARGS, declared_args) + return func + return wrapper + + +def wrap_errors(errors=None, processor=None, *args): + """ + Decorator. Wraps given exceptions into + :class:`~argh.exceptions.CommandError`. Usage:: + + @wrap_errors([AssertionError]) + def foo(x=None, y=None): + assert x or y, 'x or y must be specified' + + If the assertion fails, its message will be correctly printed and the + stack hidden. This helps to avoid boilerplate code. + + :param errors: + A list of exception classes to catch. + :param processor: + A callable that expects the exception object and returns a string. + For example, this renders all wrapped errors in red colour:: + + from termcolor import colored + + def failure(err): + return colored(str(err), 'red') + + @wrap_errors(processor=failure) + def my_command(...): + ... + + """ + + def wrapper(func): + if errors: + setattr(func, ATTR_WRAPPED_EXCEPTIONS, errors) + + if processor: + setattr(func, ATTR_WRAPPED_EXCEPTIONS_PROCESSOR, processor) + + return func + return wrapper + + +def expects_obj(func): + """ + Marks given function as expecting a namespace object. + + Usage:: + + @arg('bar') + @arg('--quux', default=123) + @expects_obj + def foo(args): + yield args.bar, args.quux + + This is equivalent to:: + + def foo(bar, quux=123): + yield bar, quux + + In most cases you don't need this decorator. + """ + setattr(func, ATTR_EXPECTS_NAMESPACE_OBJECT, True) + return func diff --git a/flask/venv/lib/python3.6/site-packages/argh/dispatching.py b/flask/venv/lib/python3.6/site-packages/argh/dispatching.py new file mode 100644 index 0000000..78efc11 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/argh/dispatching.py @@ -0,0 +1,382 @@ +# coding: utf-8 +# +# Copyright © 2010—2014 Andrey Mikhaylenko and contributors +# +# This file is part of Argh. 
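Putting the decorators together, a small hypothetical command might combine @named, @aliases and @arg like this (db_fetch and its flags are illustrative only):

import argparse
from argh.assembling import add_commands
from argh.decorators import aliases, arg, named
from argh.dispatching import dispatch

@named('fetch')
@aliases('get')
@arg('--retries', help='how many times to retry on failure')
def db_fetch(key, retries=3):
    return 'fetched {0} after at most {1} retries'.format(key, retries)

parser = argparse.ArgumentParser()
add_commands(parser, [db_fetch])  # invoked as "prog.py fetch KEY" or "prog.py get KEY"

if __name__ == '__main__':
    dispatch(parser)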
+# +# Argh is free software under terms of the GNU Lesser +# General Public License version 3 (LGPLv3) as published by the Free +# Software Foundation. See the file README.rst for copying conditions. +# +""" +Dispatching +~~~~~~~~~~~ +""" +import argparse +import sys +from types import GeneratorType + +from argh import compat, io +from argh.constants import ( + ATTR_WRAPPED_EXCEPTIONS, + ATTR_WRAPPED_EXCEPTIONS_PROCESSOR, + ATTR_EXPECTS_NAMESPACE_OBJECT, + PARSER_FORMATTER, + DEST_FUNCTION, +) +from argh.completion import autocomplete +from argh.assembling import add_commands, set_default_command +from argh.exceptions import DispatchingError, CommandError +from argh.utils import get_arg_spec + + +__all__ = ['dispatch', 'dispatch_command', 'dispatch_commands', + 'PARSER_FORMATTER', 'EntryPoint'] + + +class ArghNamespace(argparse.Namespace): + """ + A namespace object which collects the stack of functions (the + :attr:`~argh.constants.DEST_FUNCTION` arguments passed to it via + parser's defaults). + """ + def __init__(self, *args, **kw): + super(ArghNamespace, self).__init__(*args, **kw) + self._functions_stack = [] + + def __setattr__(self, k, v): + if k == DEST_FUNCTION: + # don't register the function under DEST_FUNCTION name. + # If `ArgumentParser.parse_known_args()` sees that we already have + # such attribute, it skips it. However, it goes from the topmost + # parser to subparsers. We need the function mapped to the + # subparser. So we fool the `ArgumentParser` and pretend that we + # didn't get a DEST_FUNCTION attribute; however, in fact we collect + # all its values in a stack. The last item in the stack would be + # the function mapped to the innermost parser — the one we need. + self._functions_stack.append(v) + else: + super(ArghNamespace, self).__setattr__(k, v) + + def get_function(self): + return self._functions_stack[-1] + + +def dispatch(parser, argv=None, add_help_command=True, + completion=True, pre_call=None, + output_file=sys.stdout, errors_file=sys.stderr, + raw_output=False, namespace=None, + skip_unknown_args=False): + """ + Parses given list of arguments using given parser, calls the relevant + function and prints the result. + + The target function should expect one positional argument: the + :class:`argparse.Namespace` object. However, if the function is decorated with + :func:`~argh.decorators.plain_signature`, the positional and named + arguments from the namespace object are passed to the function instead + of the object itself. + + :param parser: + + the ArgumentParser instance. + + :param argv: + + a list of strings representing the arguments. If `None`, ``sys.argv`` + is used instead. Default is `None`. + + :param add_help_command: + + if `True`, converts first positional argument "help" to a keyword + argument so that ``help foo`` becomes ``foo --help`` and displays usage + information for "foo". Default is `True`. + + :param output_file: + + A file-like object for output. If `None`, the resulting lines are + collected and returned as a string. Default is ``sys.stdout``. + + :param errors_file: + + Same as `output_file` but for ``sys.stderr``. + + :param raw_output: + + If `True`, results are written to the output file raw, without adding + whitespaces or newlines between yielded strings. Default is `False`. + + :param completion: + + If `True`, shell tab completion is enabled. Default is `True`. (You + will also need to install it.) See :mod:`argh.completion`. 
+ + :param skip_unknown_args: + + If `True`, unknown arguments do not cause an error + (`ArgumentParser.parse_known_args` is used). + + :param namespace: + + An `argparse.Namespace`-like object. By default an + :class:`ArghNamespace` object is used. Please note that support for + combined default and nested functions may be broken if a different + type of object is forced. + + By default the exceptions are not wrapped and will propagate. The only + exception that is always wrapped is :class:`~argh.exceptions.CommandError` + which is interpreted as an expected event so the traceback is hidden. + You can also mark arbitrary exceptions as "wrappable" by using the + :func:`~argh.decorators.wrap_errors` decorator. + """ + if completion: + autocomplete(parser) + + if argv is None: + argv = sys.argv[1:] + + if add_help_command: + if argv and argv[0] == 'help': + argv.pop(0) + argv.append('--help') + + if skip_unknown_args: + parse_args = parser.parse_known_args + else: + parse_args = parser.parse_args + + if not namespace: + namespace = ArghNamespace() + + # this will raise SystemExit if parsing fails + namespace_obj = parse_args(argv, namespace=namespace) + + function = _get_function_from_namespace_obj(namespace_obj) + + if function: + lines = _execute_command(function, namespace_obj, errors_file, + pre_call=pre_call) + else: + # no commands declared, can't dispatch; display help message + lines = [parser.format_usage()] + + if output_file is None: + # user wants a string; we create an internal temporary file-like object + # and will return its contents as a string + if sys.version_info < (3,0): + f = compat.BytesIO() + else: + f = compat.StringIO() + else: + # normally this is stdout; can be any file + f = output_file + + for line in lines: + # print the line as soon as it is generated to ensure that it is + # displayed to the user before anything else happens, e.g. + # raw_input() is called + + io.dump(line, f) + if not raw_output: + # in most cases user wants one message per line + io.dump('\n', f) + + if output_file is None: + # user wanted a string; return contents of our temporary file-like obj + f.seek(0) + return f.read() + + +def _get_function_from_namespace_obj(namespace_obj): + if isinstance(namespace_obj, ArghNamespace): + # our special namespace object keeps the stack of assigned functions + try: + function = namespace_obj.get_function() + except (AttributeError, IndexError): + return None + else: + # a custom (probably vanilla) namespace object keeps the last assigned + # function; this may be wrong but at least something may work + if not hasattr(namespace_obj, DEST_FUNCTION): + return None + function = getattr(namespace_obj, DEST_FUNCTION) + + if not function or not hasattr(function, '__call__'): + return None + + return function + + +def _execute_command(function, namespace_obj, errors_file, pre_call=None): + """ + Assumes that `function` is a callable. Tries different approaches + to call it (with `namespace_obj` or with ordinary signature). + Yields the results line by line. + + If :class:`~argh.exceptions.CommandError` is raised, its message is + appended to the results (i.e. yielded by the generator as a string). + All other exceptions propagate unless marked as wrappable + by :func:`wrap_errors`. 
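A brief sketch of the output_file=None behaviour described above, using a hypothetical version command; instead of printing, dispatch() collects the output lines and returns them as a string:

import argparse
from argh.assembling import set_default_command
from argh.dispatching import dispatch

def version():
    return 'app 1.0'

parser = argparse.ArgumentParser()
set_default_command(parser, version)

result = dispatch(parser, argv=[], output_file=None)
# result == 'app 1.0\n'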
+ """ + if pre_call: # XXX undocumented because I'm unsure if it's OK + # Actually used in real projects: + # * https://google.com/search?q=argh+dispatch+pre_call + # * https://github.com/neithere/argh/issues/63 + pre_call(namespace_obj) + + # the function is nested to catch certain exceptions (see below) + def _call(): + # Actually call the function + if getattr(function, ATTR_EXPECTS_NAMESPACE_OBJECT, False): + result = function(namespace_obj) + else: + # namespace -> dictionary + _flat_key = lambda key: key.replace('-', '_') + all_input = dict((_flat_key(k), v) + for k,v in vars(namespace_obj).items()) + + # filter the namespace variables so that only those expected + # by the actual function will pass + + spec = get_arg_spec(function) + + positional = [all_input[k] for k in spec.args] + kwonly = getattr(spec, 'kwonlyargs', []) + keywords = dict((k, all_input[k]) for k in kwonly) + + # *args + if spec.varargs: + positional += getattr(namespace_obj, spec.varargs) + + # **kwargs + varkw = getattr(spec, 'varkw', getattr(spec, 'keywords', [])) + if varkw: + not_kwargs = [DEST_FUNCTION] + spec.args + [spec.varargs] + kwonly + for k in vars(namespace_obj): + if k.startswith('_') or k in not_kwargs: + continue + keywords[k] = getattr(namespace_obj, k) + + result = function(*positional, **keywords) + + # Yield the results + if isinstance(result, (GeneratorType, list, tuple)): + # yield each line ASAP, convert CommandError message to a line + for line in result: + yield line + else: + # yield non-empty non-iterable result as a single line + if result is not None: + yield result + + wrappable_exceptions = [CommandError] + wrappable_exceptions += getattr(function, ATTR_WRAPPED_EXCEPTIONS, []) + + try: + result = _call() + for line in result: + yield line + except tuple(wrappable_exceptions) as e: + processor = getattr(function, ATTR_WRAPPED_EXCEPTIONS_PROCESSOR, + lambda e: '{0.__class__.__name__}: {0}'.format(e)) + + errors_file.write(compat.text_type(processor(e))) + errors_file.write('\n') + + +def dispatch_command(function, *args, **kwargs): + """ + A wrapper for :func:`dispatch` that creates a one-command parser. + Uses :attr:`PARSER_FORMATTER`. + + This:: + + dispatch_command(foo) + + ...is a shortcut for:: + + parser = ArgumentParser() + set_default_command(parser, foo) + dispatch(parser) + + This function can be also used as a decorator. + """ + parser = argparse.ArgumentParser(formatter_class=PARSER_FORMATTER) + set_default_command(parser, function) + dispatch(parser, *args, **kwargs) + + +def dispatch_commands(functions, *args, **kwargs): + """ + A wrapper for :func:`dispatch` that creates a parser, adds commands to + the parser and dispatches them. + Uses :attr:`PARSER_FORMATTER`. + + This:: + + dispatch_commands([foo, bar]) + + ...is a shortcut for:: + + parser = ArgumentParser() + add_commands(parser, [foo, bar]) + dispatch(parser) + + """ + parser = argparse.ArgumentParser(formatter_class=PARSER_FORMATTER) + add_commands(parser, functions) + dispatch(parser, *args, **kwargs) + + +class EntryPoint(object): + """ + An object to which functions can be attached and then dispatched. + + When called with an argument, the argument (a function) is registered + at this entry point as a command. + + When called without an argument, dispatching is triggered with all + previously registered commands. 
+ + Usage:: + + from argh import EntryPoint + + app = EntryPoint('main', dict(description='This is a cool app')) + + @app + def ls(): + for i in range(10): + print i + + @app + def greet(): + print 'hello' + + if __name__ == '__main__': + app() + + """ + def __init__(self, name=None, parser_kwargs=None): + self.name = name or 'unnamed' + self.commands = [] + self.parser_kwargs = parser_kwargs or {} + + def __call__(self, f=None): + if f: + self._register_command(f) + return f + + return self._dispatch() + + def _register_command(self, f): + self.commands.append(f) + + def _dispatch(self): + if not self.commands: + raise DispatchingError('no commands for entry point "{0}"' + .format(self.name)) + + parser = argparse.ArgumentParser(**self.parser_kwargs) + add_commands(parser, self.commands) + dispatch(parser) diff --git a/flask/venv/lib/python3.6/site-packages/argh/exceptions.py b/flask/venv/lib/python3.6/site-packages/argh/exceptions.py new file mode 100644 index 0000000..e331826 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/argh/exceptions.py @@ -0,0 +1,56 @@ +# coding: utf-8 +# +# Copyright © 2010—2014 Andrey Mikhaylenko and contributors +# +# This file is part of Argh. +# +# Argh is free software under terms of the GNU Lesser +# General Public License version 3 (LGPLv3) as published by the Free +# Software Foundation. See the file README.rst for copying conditions. +# +""" +Exceptions +~~~~~~~~~~ +""" +class AssemblingError(Exception): + """ + Raised if the parser could not be configured due to malformed + or conflicting command declarations. + """ + + +class DispatchingError(Exception): + """ + Raised if the dispatching could not be completed due to misconfiguration + which could not be determined on an earlier stage. + """ + + +class CommandError(Exception): + """ + Intended to be raised from within a command. The dispatcher wraps this + exception by default and prints its message without traceback. + + Useful for print-and-exit tasks when you expect a failure and don't want + to startle the ordinary user by the cryptic output. + + Consider the following example:: + + def foo(args): + try: + ... + except KeyError as e: + print(u'Could not fetch item: {0}'.format(e)) + return + + It is exactly the same as:: + + def bar(args): + try: + ... + except KeyError as e: + raise CommandError(u'Could not fetch item: {0}'.format(e)) + + This exception can be safely used in both print-style and yield-style + commands (see :doc:`tutorial`). + """ diff --git a/flask/venv/lib/python3.6/site-packages/argh/helpers.py b/flask/venv/lib/python3.6/site-packages/argh/helpers.py new file mode 100644 index 0000000..3e626d8 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/argh/helpers.py @@ -0,0 +1,64 @@ +# coding: utf-8 +# +# Copyright © 2010—2014 Andrey Mikhaylenko and contributors +# +# This file is part of Argh. +# +# Argh is free software under terms of the GNU Lesser +# General Public License version 3 (LGPLv3) as published by the Free +# Software Foundation. See the file README.rst for copying conditions. +# +""" +Helpers +~~~~~~~ +""" +import argparse + +from argh.completion import autocomplete +from argh.assembling import add_commands, set_default_command +from argh.dispatching import PARSER_FORMATTER, ArghNamespace, dispatch + + +__all__ = ['ArghParser'] + + +class ArghParser(argparse.ArgumentParser): + """ + A subclass of :class:`ArgumentParser` with support for and a couple + of convenience methods. 
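A minimal sketch of the same workflow going through this wrapper class, with hypothetical serve and build commands:

from argh.helpers import ArghParser

def serve(host='127.0.0.1', port=8000):
    return 'serving on {0}:{1}'.format(host, port)

def build(target='all'):
    return 'building {0}'.format(target)

if __name__ == '__main__':
    parser = ArghParser(description='demo app')
    parser.add_commands([serve, build])
    parser.dispatch()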
+ + All methods are but wrappers for stand-alone functions + :func:`~argh.assembling.add_commands`, + :func:`~argh.completion.autocomplete` and + :func:`~argh.dispatching.dispatch`. + + Uses :attr:`~argh.dispatching.PARSER_FORMATTER`. + """ + def __init__(self, *args, **kwargs): + kwargs.setdefault('formatter_class', PARSER_FORMATTER) + super(ArghParser, self).__init__(*args, **kwargs) + + def set_default_command(self, *args, **kwargs): + "Wrapper for :func:`~argh.assembling.set_default_command`." + return set_default_command(self, *args, **kwargs) + + def add_commands(self, *args, **kwargs): + "Wrapper for :func:`~argh.assembling.add_commands`." + return add_commands(self, *args, **kwargs) + + def autocomplete(self): + "Wrapper for :func:`~argh.completion.autocomplete`." + return autocomplete(self) + + def dispatch(self, *args, **kwargs): + "Wrapper for :func:`~argh.dispatching.dispatch`." + return dispatch(self, *args, **kwargs) + + def parse_args(self, args=None, namespace=None): + """ + Wrapper for :meth:`argparse.ArgumentParser.parse_args`. If `namespace` + is not defined, :class:`argh.dispatching.ArghNamespace` is used. + This is required for functions to be properly used as commands. + """ + namespace = namespace or ArghNamespace() + return super(ArghParser, self).parse_args(args, namespace) diff --git a/flask/venv/lib/python3.6/site-packages/argh/interaction.py b/flask/venv/lib/python3.6/site-packages/argh/interaction.py new file mode 100644 index 0000000..f368ab4 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/argh/interaction.py @@ -0,0 +1,84 @@ +# coding: utf-8 +# +# Copyright © 2010—2014 Andrey Mikhaylenko and contributors +# +# This file is part of Argh. +# +# Argh is free software under terms of the GNU Lesser +# General Public License version 3 (LGPLv3) as published by the Free +# Software Foundation. See the file README.rst for copying conditions. +# +""" +Interaction +~~~~~~~~~~~ +""" +from argh.compat import text_type +from argh.io import safe_input + + +__all__ = ['confirm', 'safe_input'] + + +def confirm(action, default=None, skip=False): + """ + A shortcut for typical confirmation prompt. + + :param action: + + a string describing the action, e.g. "Apply changes". A question mark + will be appended. + + :param default: + + `bool` or `None`. Determines what happens when user hits :kbd:`Enter` + without typing in a choice. If `True`, default choice is "yes". If + `False`, it is "no". If `None` the prompt keeps reappearing until user + types in a choice (not necessarily acceptable) or until the number of + iteration reaches the limit. Default is `None`. + + :param skip: + + `bool`; if `True`, no interactive prompt is used and default choice is + returned (useful for batch mode). Default is `False`. + + Usage:: + + def delete(key, silent=False): + item = db.get(Item, args.key) + if confirm('Delete '+item.title, default=True, skip=silent): + item.delete() + print('Item deleted.') + else: + print('Operation cancelled.') + + Returns `None` on `KeyboardInterrupt` event. + """ + MAX_ITERATIONS = 3 + if skip: + return default + else: + defaults = { + None: ('y','n'), + True: ('Y','n'), + False: ('y','N'), + } + y, n = defaults[default] + prompt = text_type('{action}? 
({y}/{n})').format(**locals()) + choice = None + try: + if default is None: + cnt = 1 + while not choice and cnt < MAX_ITERATIONS: + choice = safe_input(prompt) + cnt += 1 + else: + choice = safe_input(prompt) + except KeyboardInterrupt: + return None + if choice in ('yes', 'y', 'Y'): + return True + if choice in ('no', 'n', 'N'): + return False + if default is not None: + return default + return None diff --git a/flask/venv/lib/python3.6/site-packages/argh/io.py b/flask/venv/lib/python3.6/site-packages/argh/io.py new file mode 100644 index 0000000..35e9006 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/argh/io.py @@ -0,0 +1,105 @@ +# coding: utf-8 +# +# Copyright © 2010—2014 Andrey Mikhaylenko and contributors +# +# This file is part of Argh. +# +# Argh is free software under terms of the GNU Lesser +# General Public License version 3 (LGPLv3) as published by the Free +# Software Foundation. See the file README.rst for copying conditions. +# +""" +Output Processing +~~~~~~~~~~~~~~~~~ +""" +import locale +import sys + +from argh import compat + + +__all__ = ['dump', 'encode_output', 'safe_input'] + + +def _input(prompt): + # this function can be mocked up in tests + if sys.version_info < (3,0): + return raw_input(prompt) + else: + return input(prompt) + + +def safe_input(prompt): + """ + Prompts user for input. Correctly handles prompt message encoding. + """ + + if sys.version_info < (3,0): + if isinstance(prompt, compat.text_type): + # Python 2.x: unicode → bytes + encoding = locale.getpreferredencoding() or 'utf-8' + prompt = prompt.encode(encoding) + else: + if not isinstance(prompt, compat.text_type): + # Python 3.x: bytes → unicode + prompt = prompt.decode() + + return _input(prompt) + + +def encode_output(value, output_file): + """ + Encodes given value so it can be written to given file object. + + Value may be Unicode, binary string or any other data type. + + The exact behaviour depends on the Python version: + + Python 3.x + + `sys.stdout` is a `_io.TextIOWrapper` instance that accepts `str` + (unicode) and breaks on `bytes`. + + It is OK to simply assume that everything is Unicode unless special + handling is introduced in the client code. + + Thus, no additional processing is performed. + + Python 2.x + + `sys.stdout` is a file-like object that accepts `str` (bytes) + and breaks when `unicode` is passed to `sys.stdout.write()`. + + We can expect both Unicode and bytes. They need to be encoded so as + to match the file object encoding. + + The output is binary if the object doesn't explicitly require Unicode. + + """ + if sys.version_info > (3,0): + # Python 3: whatever → unicode + return compat.text_type(value) + else: + # Python 2: handle special cases + stream_encoding = getattr(output_file, 'encoding', None) + if stream_encoding: + if stream_encoding.upper() == 'UTF-8': + return compat.text_type(value) + else: + return value.encode(stream_encoding, 'ignore') + else: + # no explicit encoding requirements; force binary + if isinstance(value, compat.text_type): + # unicode → binary + return value.encode('utf-8') + else: + return str(value) + + +def dump(raw_data, output_file): + """ + Writes given line to given output file. + See :func:`encode_output` for details. 
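A runnable sketch of confirm() inside a command, loosely following the usage shown above; the purge command and its --assume-yes flag are illustrative only:

from argh.dispatching import dispatch_command
from argh.interaction import confirm

def purge(assume_yes=False):
    # with --assume-yes the prompt is skipped and the default answer is used
    if confirm('Purge all records', default=True, skip=assume_yes):
        return 'purged'
    return 'cancelled'

if __name__ == '__main__':
    dispatch_command(purge)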
+ """ + data = encode_output(raw_data, output_file) + output_file.write(data) diff --git a/flask/venv/lib/python3.6/site-packages/argh/utils.py b/flask/venv/lib/python3.6/site-packages/argh/utils.py new file mode 100644 index 0000000..8650bb7 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/argh/utils.py @@ -0,0 +1,55 @@ +# coding: utf-8 +# +# Copyright © 2010—2014 Andrey Mikhaylenko and contributors +# +# This file is part of Argh. +# +# Argh is free software under terms of the GNU Lesser +# General Public License version 3 (LGPLv3) as published by the Free +# Software Foundation. See the file README.rst for copying conditions. +# +""" +Utilities +~~~~~~~~~ +""" +import argparse +import inspect + +from argh import compat + + +def get_subparsers(parser, create=False): + """ + Returns the :class:`argparse._SubParsersAction` instance for given + :class:`ArgumentParser` instance as would have been returned by + :meth:`ArgumentParser.add_subparsers`. The problem with the latter is that + it only works once and raises an exception on the second attempt, and the + public API seems to lack a method to get *existing* subparsers. + + :param create: + If `True`, creates the subparser if it does not exist. Default if + `False`. + + """ + # note that ArgumentParser._subparsers is *not* what is returned by + # ArgumentParser.add_subparsers(). + if parser._subparsers: + actions = [a for a in parser._actions + if isinstance(a, argparse._SubParsersAction)] + assert len(actions) == 1 + return actions[0] + else: + if create: + return parser.add_subparsers() + + +def get_arg_spec(function): + """ + Returns argument specification for given function. Omits special + arguments of instance methods (`self`) and static methods (usually `cls` + or something like this). + """ + spec = compat.getargspec(function) + if inspect.ismethod(function): + spec = spec._replace(args=spec.args[1:]) + return spec diff --git a/flask/venv/lib/python3.6/site-packages/blinker-1.4.egg-info/PKG-INFO b/flask/venv/lib/python3.6/site-packages/blinker-1.4.egg-info/PKG-INFO new file mode 100644 index 0000000..7b58794 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/blinker-1.4.egg-info/PKG-INFO @@ -0,0 +1,101 @@ +Metadata-Version: 1.1 +Name: blinker +Version: 1.4 +Summary: Fast, simple object-to-object and broadcast signaling +Home-page: http://pythonhosted.org/blinker/ +Author: Jason Kirtland +Author-email: jek@discorporate.us +License: MIT License +Description: [![Build Status](https://travis-ci.org/jek/blinker.svg?branch=master)](https://travis-ci.org/jek/blinker) + + + # Blinker + + Blinker provides a fast dispatching system that allows any number of + interested parties to subscribe to events, or "signals". + + Signal receivers can subscribe to specific senders or receive signals + sent by any sender. + + >>> from blinker import signal + >>> started = signal('round-started') + >>> def each(round): + ... print "Round %s!" % round + ... + >>> started.connect(each) + + >>> def round_two(round): + ... print "This is round two." + ... + >>> started.connect(round_two, sender=2) + + >>> for round in range(1, 4): + ... started.send(round) + ... + Round 1! + Round 2! + This is round two. + Round 3! + + See the [Blinker documentation](https://pythonhosted.org/blinker/) for more information. + + ## Requirements + + Blinker requires Python 2.4 or higher, Python 3.0 or higher, or Jython 2.5 or higher. 
+ + ## Changelog Summary + + 1.3 (July 3, 2013) + + - The global signal stash behind blinker.signal() is now backed by a + regular name-to-Signal dictionary. Previously, weak references were + held in the mapping and ephemeral usage in code like + ``signal('foo').connect(...)`` could have surprising program behavior + depending on import order of modules. + - blinker.Namespace is now built on a regular dict. Use + blinker.WeakNamespace for the older, weak-referencing behavior. + - Signal.connect('text-sender') uses an alternate hashing strategy to + avoid sharp edges in text identity. + + 1.2 (October 26, 2011) + + - Added Signal.receiver_connected and Signal.receiver_disconnected + per-Signal signals. + - Deprecated the global 'receiver_connected' signal. + - Verified Python 3.2 support (no changes needed!) + + 1.1 (July 21, 2010) + + - Added ``@signal.connect_via(sender)`` decorator + - Added ``signal.connected_to`` shorthand name for the + ``temporarily_connected_to`` context manager. + + 1.0 (March 28, 2010) + + - Python 3.x compatibility + + 0.9 (February 26, 2010) + + - Sphinx docs, project website + - Added ``with a_signal.temporarily_connected_to(receiver): ...`` support + +Keywords: signal emit events broadcast +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.4 +Classifier: Programming Language :: Python :: 2.5 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.0 +Classifier: Programming Language :: Python :: 3.1 +Classifier: Programming Language :: Python :: 3.2 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Topic :: Software Development :: Libraries +Classifier: Topic :: Utilities diff --git a/flask/venv/lib/python3.6/site-packages/blinker-1.4.egg-info/SOURCES.txt b/flask/venv/lib/python3.6/site-packages/blinker-1.4.egg-info/SOURCES.txt new file mode 100644 index 0000000..8a2dace --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/blinker-1.4.egg-info/SOURCES.txt @@ -0,0 +1,49 @@ +AUTHORS +CHANGES +LICENSE +MANIFEST.in +README.md +setup.cfg +setup.py +blinker/__init__.py +blinker/_saferef.py +blinker/_utilities.py +blinker/base.py +blinker.egg-info/PKG-INFO +blinker.egg-info/SOURCES.txt +blinker.egg-info/dependency_links.txt +blinker.egg-info/top_level.txt +docs/html/genindex.html +docs/html/index.html +docs/html/objects.inv +docs/html/search.html +docs/html/searchindex.js +docs/html/_sources/index.txt +docs/html/_static/basic.css +docs/html/_static/blinker-named.png +docs/html/_static/blinker64.png +docs/html/_static/comment-bright.png +docs/html/_static/comment-close.png +docs/html/_static/comment.png +docs/html/_static/doctools.js +docs/html/_static/down-pressed.png +docs/html/_static/down.png +docs/html/_static/file.png +docs/html/_static/flasky.css +docs/html/_static/jquery.js +docs/html/_static/minus.png +docs/html/_static/plus.png +docs/html/_static/pygments.css +docs/html/_static/searchtools.js +docs/html/_static/underscore.js +docs/html/_static/up-pressed.png +docs/html/_static/up.png +docs/html/_static/websupport.js +docs/source/conf.py 
+docs/source/index.rst +docs/source/_themes/flask_theme_support.py +docs/text/index.txt +tests/test_context.py +tests/test_saferef.py +tests/test_signals.py +tests/test_utilities.py \ No newline at end of file diff --git a/flask/venv/lib/python3.6/site-packages/blinker-1.4.egg-info/dependency_links.txt b/flask/venv/lib/python3.6/site-packages/blinker-1.4.egg-info/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/blinker-1.4.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/flask/venv/lib/python3.6/site-packages/blinker-1.4.egg-info/installed-files.txt b/flask/venv/lib/python3.6/site-packages/blinker-1.4.egg-info/installed-files.txt new file mode 100644 index 0000000..08c8af0 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/blinker-1.4.egg-info/installed-files.txt @@ -0,0 +1,12 @@ +../blinker/__init__.py +../blinker/__pycache__/__init__.cpython-36.pyc +../blinker/__pycache__/_saferef.cpython-36.pyc +../blinker/__pycache__/_utilities.cpython-36.pyc +../blinker/__pycache__/base.cpython-36.pyc +../blinker/_saferef.py +../blinker/_utilities.py +../blinker/base.py +PKG-INFO +SOURCES.txt +dependency_links.txt +top_level.txt diff --git a/flask/venv/lib/python3.6/site-packages/blinker-1.4.egg-info/top_level.txt b/flask/venv/lib/python3.6/site-packages/blinker-1.4.egg-info/top_level.txt new file mode 100644 index 0000000..1ff4ca5 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/blinker-1.4.egg-info/top_level.txt @@ -0,0 +1 @@ +blinker diff --git a/flask/venv/lib/python3.6/site-packages/blinker/__init__.py b/flask/venv/lib/python3.6/site-packages/blinker/__init__.py new file mode 100644 index 0000000..3ea239c --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/blinker/__init__.py @@ -0,0 +1,22 @@ +from blinker.base import ( + ANY, + NamedSignal, + Namespace, + Signal, + WeakNamespace, + receiver_connected, + signal, +) + +__all__ = [ + 'ANY', + 'NamedSignal', + 'Namespace', + 'Signal', + 'WeakNamespace', + 'receiver_connected', + 'signal', + ] + + +__version__ = '1.4' diff --git a/flask/venv/lib/python3.6/site-packages/blinker/__pycache__/__init__.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/blinker/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..4e6f99b Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/blinker/__pycache__/__init__.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/blinker/__pycache__/_saferef.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/blinker/__pycache__/_saferef.cpython-36.pyc new file mode 100644 index 0000000..b6d649b Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/blinker/__pycache__/_saferef.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/blinker/__pycache__/_utilities.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/blinker/__pycache__/_utilities.cpython-36.pyc new file mode 100644 index 0000000..f96c26f Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/blinker/__pycache__/_utilities.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/blinker/__pycache__/base.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/blinker/__pycache__/base.cpython-36.pyc new file mode 100644 index 0000000..476c766 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/blinker/__pycache__/base.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/blinker/_saferef.py 
b/flask/venv/lib/python3.6/site-packages/blinker/_saferef.py new file mode 100644 index 0000000..269e362 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/blinker/_saferef.py @@ -0,0 +1,234 @@ +# extracted from Louie, http://pylouie.org/ +# updated for Python 3 +# +# Copyright (c) 2006 Patrick K. O'Brien, Mike C. Fletcher, +# Matthew R. Scott +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials provided +# with the distribution. +# +# * Neither the name of the nor the names of its +# contributors may be used to endorse or promote products derived +# from this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +# +"""Refactored 'safe reference from dispatcher.py""" + +import operator +import sys +import traceback +import weakref + + +try: + callable +except NameError: + def callable(object): + return hasattr(object, '__call__') + + +if sys.version_info < (3,): + get_self = operator.attrgetter('im_self') + get_func = operator.attrgetter('im_func') +else: + get_self = operator.attrgetter('__self__') + get_func = operator.attrgetter('__func__') + + +def safe_ref(target, on_delete=None): + """Return a *safe* weak reference to a callable target. + + - ``target``: The object to be weakly referenced, if it's a bound + method reference, will create a BoundMethodWeakref, otherwise + creates a simple weakref. + + - ``on_delete``: If provided, will have a hard reference stored to + the callable to be called after the safe reference goes out of + scope with the reference object, (either a weakref or a + BoundMethodWeakref) as argument. + """ + try: + im_self = get_self(target) + except AttributeError: + if callable(on_delete): + return weakref.ref(target, on_delete) + else: + return weakref.ref(target) + else: + if im_self is not None: + # Turn a bound method into a BoundMethodWeakref instance. + # Keep track of these instances for lookup by disconnect(). + assert hasattr(target, 'im_func') or hasattr(target, '__func__'), ( + "safe_ref target %r has im_self, but no im_func, " + "don't know how to create reference" % target) + reference = BoundMethodWeakref(target=target, on_delete=on_delete) + return reference + + +class BoundMethodWeakref(object): + """'Safe' and reusable weak references to instance methods. 
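A quick sketch of why this matters, exercising the safe_ref() helper defined above with a hypothetical Greeter class: a plain weakref to a bound method dies as soon as the temporary method object is collected (immediately, on CPython), while safe_ref() stays usable for as long as the instance itself is alive:

import weakref
from blinker._saferef import safe_ref

class Greeter(object):
    def greet(self):
        return 'hi'

g = Greeter()
plain = weakref.ref(g.greet)   # the temporary bound method is gone right away
safe = safe_ref(g.greet)       # weak refs to the instance and function instead

print(plain() is None)             # True on CPython
print(safe()())                    # 'hi': still resolvable while g is alive
print(safe_ref(g.greet) is safe)   # True: same (object, function) pair, same ref

del g
print(safe() is None)              # True once the instance is collected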
+ + BoundMethodWeakref objects provide a mechanism for referencing a + bound method without requiring that the method object itself + (which is normally a transient object) is kept alive. Instead, + the BoundMethodWeakref object keeps weak references to both the + object and the function which together define the instance method. + + Attributes: + + - ``key``: The identity key for the reference, calculated by the + class's calculate_key method applied to the target instance method. + + - ``deletion_methods``: Sequence of callable objects taking single + argument, a reference to this object which will be called when + *either* the target object or target function is garbage + collected (i.e. when this object becomes invalid). These are + specified as the on_delete parameters of safe_ref calls. + + - ``weak_self``: Weak reference to the target object. + + - ``weak_func``: Weak reference to the target function. + + Class Attributes: + + - ``_all_instances``: Class attribute pointing to all live + BoundMethodWeakref objects indexed by the class's + calculate_key(target) method applied to the target objects. + This weak value dictionary is used to short-circuit creation so + that multiple references to the same (object, function) pair + produce the same BoundMethodWeakref instance. + """ + + _all_instances = weakref.WeakValueDictionary() + + def __new__(cls, target, on_delete=None, *arguments, **named): + """Create new instance or return current instance. + + Basically this method of construction allows us to + short-circuit creation of references to already- referenced + instance methods. The key corresponding to the target is + calculated, and if there is already an existing reference, + that is returned, with its deletion_methods attribute updated. + Otherwise the new instance is created and registered in the + table of already-referenced methods. + """ + key = cls.calculate_key(target) + current = cls._all_instances.get(key) + if current is not None: + current.deletion_methods.append(on_delete) + return current + else: + base = super(BoundMethodWeakref, cls).__new__(cls) + cls._all_instances[key] = base + base.__init__(target, on_delete, *arguments, **named) + return base + + def __init__(self, target, on_delete=None): + """Return a weak-reference-like instance for a bound method. + + - ``target``: The instance-method target for the weak reference, + must have im_self and im_func attributes and be + reconstructable via the following, which is true of built-in + instance methods:: + + target.im_func.__get__( target.im_self ) + + - ``on_delete``: Optional callback which will be called when + this weak reference ceases to be valid (i.e. either the + object or the function is garbage collected). Should take a + single argument, which will be passed a pointer to this + object. 
+ """ + def remove(weak, self=self): + """Set self.isDead to True when method or instance is destroyed.""" + methods = self.deletion_methods[:] + del self.deletion_methods[:] + try: + del self.__class__._all_instances[self.key] + except KeyError: + pass + for function in methods: + try: + if callable(function): + function(self) + except Exception: + try: + traceback.print_exc() + except AttributeError: + e = sys.exc_info()[1] + print ('Exception during saferef %s ' + 'cleanup function %s: %s' % (self, function, e)) + self.deletion_methods = [on_delete] + self.key = self.calculate_key(target) + im_self = get_self(target) + im_func = get_func(target) + self.weak_self = weakref.ref(im_self, remove) + self.weak_func = weakref.ref(im_func, remove) + self.self_name = str(im_self) + self.func_name = str(im_func.__name__) + + def calculate_key(cls, target): + """Calculate the reference key for this reference. + + Currently this is a two-tuple of the id()'s of the target + object and the target function respectively. + """ + return (id(get_self(target)), id(get_func(target))) + calculate_key = classmethod(calculate_key) + + def __str__(self): + """Give a friendly representation of the object.""" + return "%s(%s.%s)" % ( + self.__class__.__name__, + self.self_name, + self.func_name, + ) + + __repr__ = __str__ + + def __nonzero__(self): + """Whether we are still a valid reference.""" + return self() is not None + + def __cmp__(self, other): + """Compare with another reference.""" + if not isinstance(other, self.__class__): + return cmp(self.__class__, type(other)) + return cmp(self.key, other.key) + + def __call__(self): + """Return a strong reference to the bound method. + + If the target cannot be retrieved, then will return None, + otherwise returns a bound instance method for our object and + function. + + Note: You may call this method any number of times, as it does + not invalidate the reference. 
+ """ + target = self.weak_self() + if target is not None: + function = self.weak_func() + if function is not None: + return function.__get__(target) + return None diff --git a/flask/venv/lib/python3.6/site-packages/blinker/_utilities.py b/flask/venv/lib/python3.6/site-packages/blinker/_utilities.py new file mode 100644 index 0000000..056270d --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/blinker/_utilities.py @@ -0,0 +1,163 @@ +from weakref import ref + +from blinker._saferef import BoundMethodWeakref + + +try: + callable +except NameError: + def callable(object): + return hasattr(object, '__call__') + + +try: + from collections import defaultdict +except: + class defaultdict(dict): + + def __init__(self, default_factory=None, *a, **kw): + if (default_factory is not None and + not hasattr(default_factory, '__call__')): + raise TypeError('first argument must be callable') + dict.__init__(self, *a, **kw) + self.default_factory = default_factory + + def __getitem__(self, key): + try: + return dict.__getitem__(self, key) + except KeyError: + return self.__missing__(key) + + def __missing__(self, key): + if self.default_factory is None: + raise KeyError(key) + self[key] = value = self.default_factory() + return value + + def __reduce__(self): + if self.default_factory is None: + args = tuple() + else: + args = self.default_factory, + return type(self), args, None, None, self.items() + + def copy(self): + return self.__copy__() + + def __copy__(self): + return type(self)(self.default_factory, self) + + def __deepcopy__(self, memo): + import copy + return type(self)(self.default_factory, + copy.deepcopy(self.items())) + + def __repr__(self): + return 'defaultdict(%s, %s)' % (self.default_factory, + dict.__repr__(self)) + + +try: + from contextlib import contextmanager +except ImportError: + def contextmanager(fn): + def oops(*args, **kw): + raise RuntimeError("Python 2.5 or above is required to use " + "context managers.") + oops.__name__ = fn.__name__ + return oops + +class _symbol(object): + + def __init__(self, name): + """Construct a new named symbol.""" + self.__name__ = self.name = name + + def __reduce__(self): + return symbol, (self.name,) + + def __repr__(self): + return self.name +_symbol.__name__ = 'symbol' + + +class symbol(object): + """A constant symbol. + + >>> symbol('foo') is symbol('foo') + True + >>> symbol('foo') + foo + + A slight refinement of the MAGICCOOKIE=object() pattern. The primary + advantage of symbol() is its repr(). They are also singletons. + + Repeated calls of symbol('name') will all return the same instance. 
+ + """ + symbols = {} + + def __new__(cls, name): + try: + return cls.symbols[name] + except KeyError: + return cls.symbols.setdefault(name, _symbol(name)) + + +try: + text = (str, unicode) +except NameError: + text = str + + +def hashable_identity(obj): + if hasattr(obj, '__func__'): + return (id(obj.__func__), id(obj.__self__)) + elif hasattr(obj, 'im_func'): + return (id(obj.im_func), id(obj.im_self)) + elif isinstance(obj, text): + return obj + else: + return id(obj) + + +WeakTypes = (ref, BoundMethodWeakref) + + +class annotatable_weakref(ref): + """A weakref.ref that supports custom instance attributes.""" + + +def reference(object, callback=None, **annotations): + """Return an annotated weak ref.""" + if callable(object): + weak = callable_reference(object, callback) + else: + weak = annotatable_weakref(object, callback) + for key, value in annotations.items(): + setattr(weak, key, value) + return weak + + +def callable_reference(object, callback=None): + """Return an annotated weak ref, supporting bound instance methods.""" + if hasattr(object, 'im_self') and object.im_self is not None: + return BoundMethodWeakref(target=object, on_delete=callback) + elif hasattr(object, '__self__') and object.__self__ is not None: + return BoundMethodWeakref(target=object, on_delete=callback) + return annotatable_weakref(object, callback) + + +class lazy_property(object): + """A @property that is only evaluated once.""" + + def __init__(self, deferred): + self._deferred = deferred + self.__doc__ = deferred.__doc__ + + def __get__(self, obj, cls): + if obj is None: + return self + value = self._deferred(obj) + setattr(obj, self._deferred.__name__, value) + return value diff --git a/flask/venv/lib/python3.6/site-packages/blinker/base.py b/flask/venv/lib/python3.6/site-packages/blinker/base.py new file mode 100644 index 0000000..cc5880e --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/blinker/base.py @@ -0,0 +1,455 @@ +# -*- coding: utf-8; fill-column: 76 -*- +"""Signals and events. + +A small implementation of signals, inspired by a snippet of Django signal +API client code seen in a blog post. Signals are first-class objects and +each manages its own receivers and message emission. + +The :func:`signal` function provides singleton behavior for named signals. + +""" +from warnings import warn +from weakref import WeakValueDictionary + +from blinker._utilities import ( + WeakTypes, + contextmanager, + defaultdict, + hashable_identity, + lazy_property, + reference, + symbol, + ) + + +ANY = symbol('ANY') +ANY.__doc__ = 'Token for "any sender".' +ANY_ID = 0 + + +class Signal(object): + """A notification emitter.""" + + #: An :obj:`ANY` convenience synonym, allows ``Signal.ANY`` + #: without an additional import. + ANY = ANY + + @lazy_property + def receiver_connected(self): + """Emitted after each :meth:`connect`. + + The signal sender is the signal instance, and the :meth:`connect` + arguments are passed through: *receiver*, *sender*, and *weak*. + + .. versionadded:: 1.2 + + """ + return Signal(doc="Emitted after a receiver connects.") + + @lazy_property + def receiver_disconnected(self): + """Emitted after :meth:`disconnect`. + + The sender is the signal instance, and the :meth:`disconnect` arguments + are passed through: *receiver* and *sender*. + + Note, this signal is emitted **only** when :meth:`disconnect` is + called explicitly. 
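A sketch of the per-signal `receiver_connected` hook documented above (the `audit` callback and its bookkeeping list are illustrative):

    from blinker import Signal

    sig = Signal()
    connect_log = []

    def audit(signal, receiver, sender, weak):
        # called with the signal as the positional sender plus the connect() arguments
        connect_log.append((signal, receiver, sender, weak))

    sig.receiver_connected.connect(audit)

    def handler(sender, **kwargs):
        return None

    sig.connect(handler)
    assert connect_log == [(sig, handler, sig.ANY, True)]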
+ + The disconnect signal can not be emitted by an automatic disconnect + (due to a weakly referenced receiver or sender going out of scope), + as the receiver and/or sender instances are no longer available for + use at the time this signal would be emitted. + + An alternative approach is available by subscribing to + :attr:`receiver_connected` and setting up a custom weakref cleanup + callback on weak receivers and senders. + + .. versionadded:: 1.2 + + """ + return Signal(doc="Emitted after a receiver disconnects.") + + def __init__(self, doc=None): + """ + :param doc: optional. If provided, will be assigned to the signal's + __doc__ attribute. + + """ + if doc: + self.__doc__ = doc + #: A mapping of connected receivers. + #: + #: The values of this mapping are not meaningful outside of the + #: internal :class:`Signal` implementation, however the boolean value + #: of the mapping is useful as an extremely efficient check to see if + #: any receivers are connected to the signal. + self.receivers = {} + self._by_receiver = defaultdict(set) + self._by_sender = defaultdict(set) + self._weak_senders = {} + + def connect(self, receiver, sender=ANY, weak=True): + """Connect *receiver* to signal events sent by *sender*. + + :param receiver: A callable. Will be invoked by :meth:`send` with + `sender=` as a single positional argument and any \*\*kwargs that + were provided to a call to :meth:`send`. + + :param sender: Any object or :obj:`ANY`, defaults to ``ANY``. + Restricts notifications delivered to *receiver* to only those + :meth:`send` emissions sent by *sender*. If ``ANY``, the receiver + will always be notified. A *receiver* may be connected to + multiple *sender* values on the same Signal through multiple calls + to :meth:`connect`. + + :param weak: If true, the Signal will hold a weakref to *receiver* + and automatically disconnect when *receiver* goes out of scope or + is garbage collected. Defaults to True. + + """ + receiver_id = hashable_identity(receiver) + if weak: + receiver_ref = reference(receiver, self._cleanup_receiver) + receiver_ref.receiver_id = receiver_id + else: + receiver_ref = receiver + if sender is ANY: + sender_id = ANY_ID + else: + sender_id = hashable_identity(sender) + + self.receivers.setdefault(receiver_id, receiver_ref) + self._by_sender[sender_id].add(receiver_id) + self._by_receiver[receiver_id].add(sender_id) + del receiver_ref + + if sender is not ANY and sender_id not in self._weak_senders: + # wire together a cleanup for weakref-able senders + try: + sender_ref = reference(sender, self._cleanup_sender) + sender_ref.sender_id = sender_id + except TypeError: + pass + else: + self._weak_senders.setdefault(sender_id, sender_ref) + del sender_ref + + # broadcast this connection. if receivers raise, disconnect. + if ('receiver_connected' in self.__dict__ and + self.receiver_connected.receivers): + try: + self.receiver_connected.send(self, + receiver=receiver, + sender=sender, + weak=weak) + except: + self.disconnect(receiver, sender) + raise + if receiver_connected.receivers and self is not receiver_connected: + try: + receiver_connected.send(self, + receiver_arg=receiver, + sender_arg=sender, + weak_arg=weak) + except: + self.disconnect(receiver, sender) + raise + return receiver + + def connect_via(self, sender, weak=False): + """Connect the decorated function as a receiver for *sender*. + + :param sender: Any object or :obj:`ANY`. The decorated function + will only receive :meth:`send` emissions sent by *sender*. 
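A basic connect-and-send sketch for the `connect` method shown above (receiver and sender names are illustrative):

    from blinker import Signal

    job_done = Signal(doc='Emitted when a job finishes.')
    results = []

    def receiver(sender, **kwargs):
        results.append((sender, kwargs))
        return 'ok'

    job_done.connect(receiver)                   # sender=ANY, weak=True by default
    returned = job_done.send('job-1', status='done')

    assert results == [('job-1', {'status': 'done'})]
    assert returned == [(receiver, 'ok')]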
If + ``ANY``, the receiver will always be notified. A function may be + decorated multiple times with differing *sender* values. + + :param weak: If true, the Signal will hold a weakref to the + decorated function and automatically disconnect when *receiver* + goes out of scope or is garbage collected. Unlike + :meth:`connect`, this defaults to False. + + The decorated function will be invoked by :meth:`send` with + `sender=` as a single positional argument and any \*\*kwargs that + were provided to the call to :meth:`send`. + + + .. versionadded:: 1.1 + + """ + def decorator(fn): + self.connect(fn, sender, weak) + return fn + return decorator + + @contextmanager + def connected_to(self, receiver, sender=ANY): + """Execute a block with the signal temporarily connected to *receiver*. + + :param receiver: a receiver callable + :param sender: optional, a sender to filter on + + This is a context manager for use in the ``with`` statement. It can + be useful in unit tests. *receiver* is connected to the signal for + the duration of the ``with`` block, and will be disconnected + automatically when exiting the block: + + .. testsetup:: + + from __future__ import with_statement + from blinker import Signal + on_ready = Signal() + receiver = lambda sender: None + + .. testcode:: + + with on_ready.connected_to(receiver): + # do stuff + on_ready.send(123) + + .. versionadded:: 1.1 + + """ + self.connect(receiver, sender=sender, weak=False) + try: + yield None + except: + self.disconnect(receiver) + raise + else: + self.disconnect(receiver) + + def temporarily_connected_to(self, receiver, sender=ANY): + """An alias for :meth:`connected_to`. + + :param receiver: a receiver callable + :param sender: optional, a sender to filter on + + .. versionadded:: 0.9 + + .. versionchanged:: 1.1 + Renamed to :meth:`connected_to`. ``temporarily_connected_to`` was + deprecated in 1.2 and will be removed in a subsequent version. + + """ + warn("temporarily_connected_to is deprecated; " + "use connected_to instead.", + DeprecationWarning) + return self.connected_to(receiver, sender) + + def send(self, *sender, **kwargs): + """Emit this signal on behalf of *sender*, passing on \*\*kwargs. + + Returns a list of 2-tuples, pairing receivers with their return + value. The ordering of receiver notification is undefined. + + :param \*sender: Any object or ``None``. If omitted, synonymous + with ``None``. Only accepts one positional argument. + + :param \*\*kwargs: Data to be sent to receivers. + + """ + # Using '*sender' rather than 'sender=None' allows 'sender' to be + # used as a keyword argument- i.e. it's an invisible name in the + # function signature. + if len(sender) == 0: + sender = None + elif len(sender) > 1: + raise TypeError('send() accepts only one positional argument, ' + '%s given' % len(sender)) + else: + sender = sender[0] + if not self.receivers: + return [] + else: + return [(receiver, receiver(sender, **kwargs)) + for receiver in self.receivers_for(sender)] + + def has_receivers_for(self, sender): + """True if there is probably a receiver for *sender*. + + Performs an optimistic check only. Does not guarantee that all + weakly referenced receivers are still alive. See + :meth:`receivers_for` for a stronger search. 
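A sketch of sender filtering with the `connect_via` decorator defined above (signal and sender names are illustrative):

    from blinker import Signal

    on_update = Signal()
    seen = []

    @on_update.connect_via('model-a')            # only emissions sent by 'model-a'
    def tracker(sender, **kwargs):
        seen.append((sender, kwargs.get('field')))

    on_update.send('model-a', field='name')
    on_update.send('model-b', field='name')      # filtered out by the sender check
    assert seen == [('model-a', 'name')]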
+ + """ + if not self.receivers: + return False + if self._by_sender[ANY_ID]: + return True + if sender is ANY: + return False + return hashable_identity(sender) in self._by_sender + + def receivers_for(self, sender): + """Iterate all live receivers listening for *sender*.""" + # TODO: test receivers_for(ANY) + if self.receivers: + sender_id = hashable_identity(sender) + if sender_id in self._by_sender: + ids = (self._by_sender[ANY_ID] | + self._by_sender[sender_id]) + else: + ids = self._by_sender[ANY_ID].copy() + for receiver_id in ids: + receiver = self.receivers.get(receiver_id) + if receiver is None: + continue + if isinstance(receiver, WeakTypes): + strong = receiver() + if strong is None: + self._disconnect(receiver_id, ANY_ID) + continue + receiver = strong + yield receiver + + def disconnect(self, receiver, sender=ANY): + """Disconnect *receiver* from this signal's events. + + :param receiver: a previously :meth:`connected` callable + + :param sender: a specific sender to disconnect from, or :obj:`ANY` + to disconnect from all senders. Defaults to ``ANY``. + + """ + if sender is ANY: + sender_id = ANY_ID + else: + sender_id = hashable_identity(sender) + receiver_id = hashable_identity(receiver) + self._disconnect(receiver_id, sender_id) + + if ('receiver_disconnected' in self.__dict__ and + self.receiver_disconnected.receivers): + self.receiver_disconnected.send(self, + receiver=receiver, + sender=sender) + + def _disconnect(self, receiver_id, sender_id): + if sender_id == ANY_ID: + if self._by_receiver.pop(receiver_id, False): + for bucket in self._by_sender.values(): + bucket.discard(receiver_id) + self.receivers.pop(receiver_id, None) + else: + self._by_sender[sender_id].discard(receiver_id) + self._by_receiver[receiver_id].discard(sender_id) + + def _cleanup_receiver(self, receiver_ref): + """Disconnect a receiver from all senders.""" + self._disconnect(receiver_ref.receiver_id, ANY_ID) + + def _cleanup_sender(self, sender_ref): + """Disconnect all receivers from a sender.""" + sender_id = sender_ref.sender_id + assert sender_id != ANY_ID + self._weak_senders.pop(sender_id, None) + for receiver_id in self._by_sender.pop(sender_id, ()): + self._by_receiver[receiver_id].discard(sender_id) + + def _cleanup_bookkeeping(self): + """Prune unused sender/receiver bookeeping. Not threadsafe. + + Connecting & disconnecting leave behind a small amount of bookeeping + for the receiver and sender values. Typical workloads using Blinker, + for example in most web apps, Flask, CLI scripts, etc., are not + adversely affected by this bookkeeping. + + With a long-running Python process performing dynamic signal routing + with high volume- e.g. connecting to function closures, "senders" are + all unique object instances, and doing all of this over and over- you + may see memory usage will grow due to extraneous bookeeping. (An empty + set() for each stale sender/receiver pair.) + + This method will prune that bookeeping away, with the caveat that such + pruning is not threadsafe. The risk is that cleanup of a fully + disconnected receiver/sender pair occurs while another thread is + connecting that same pair. If you are in the highly dynamic, unique + receiver/sender situation that has lead you to this method, that + failure mode is perhaps not a big deal for you. + """ + for mapping in (self._by_sender, self._by_receiver): + for _id, bucket in list(mapping.items()): + if not bucket: + mapping.pop(_id, None) + + def _clear_state(self): + """Throw away all signal state. 
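A sketch of explicit disconnection with the methods above (names are illustrative):

    from blinker import Signal

    sig = Signal()
    hits = []

    def receiver(sender, **kwargs):
        hits.append(sender)

    sig.connect(receiver, sender='worker')
    assert sig.has_receivers_for('worker')

    sig.send('worker')
    sig.disconnect(receiver, sender='worker')    # explicit disconnect
    sig.send('worker')                           # no longer delivered
    assert hits == ['worker']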
Useful for unit tests.""" + self._weak_senders.clear() + self.receivers.clear() + self._by_sender.clear() + self._by_receiver.clear() + + +receiver_connected = Signal("""\ +Sent by a :class:`Signal` after a receiver connects. + +:argument: the Signal that was connected to +:keyword receiver_arg: the connected receiver +:keyword sender_arg: the sender to connect to +:keyword weak_arg: true if the connection to receiver_arg is a weak reference + +.. deprecated:: 1.2 + +As of 1.2, individual signals have their own private +:attr:`~Signal.receiver_connected` and +:attr:`~Signal.receiver_disconnected` signals with a slightly simplified +call signature. This global signal is planned to be removed in 1.6. + +""") + + +class NamedSignal(Signal): + """A named generic notification emitter.""" + + def __init__(self, name, doc=None): + Signal.__init__(self, doc) + + #: The name of this signal. + self.name = name + + def __repr__(self): + base = Signal.__repr__(self) + return "%s; %r>" % (base[:-1], self.name) + + +class Namespace(dict): + """A mapping of signal names to signals.""" + + def signal(self, name, doc=None): + """Return the :class:`NamedSignal` *name*, creating it if required. + + Repeated calls to this function will return the same signal object. + + """ + try: + return self[name] + except KeyError: + return self.setdefault(name, NamedSignal(name, doc)) + + +class WeakNamespace(WeakValueDictionary): + """A weak mapping of signal names to signals. + + Automatically cleans up unused Signals when the last reference goes out + of scope. This namespace implementation exists for a measure of legacy + compatibility with Blinker <= 1.2, and may be dropped in the future. + + .. versionadded:: 1.3 + + """ + + def signal(self, name, doc=None): + """Return the :class:`NamedSignal` *name*, creating it if required. + + Repeated calls to this function will return the same signal object. + + """ + try: + return self[name] + except KeyError: + return self.setdefault(name, NamedSignal(name, doc)) + + +signal = Namespace().signal diff --git a/flask/venv/lib/python3.6/site-packages/click/__init__.py b/flask/venv/lib/python3.6/site-packages/click/__init__.py new file mode 100644 index 0000000..d3c3366 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/click/__init__.py @@ -0,0 +1,97 @@ +# -*- coding: utf-8 -*- +""" +click +~~~~~ + +Click is a simple Python module inspired by the stdlib optparse to make +writing command line scripts fun. Unlike other modules, it's based +around a simple API that does not come with too much magic and is +composable. + +:copyright: © 2014 by the Pallets team. +:license: BSD, see LICENSE.rst for more details. 
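A sketch of the named-signal singleton behaviour provided by `Namespace.signal` above:

    from blinker import signal

    ready = signal('site-ready', doc='Fired when boot completes.')

    assert ready is signal('site-ready')         # repeated lookups return the same NamedSignal
    assert ready.name == 'site-ready'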
+""" + +# Core classes +from .core import Context, BaseCommand, Command, MultiCommand, Group, \ + CommandCollection, Parameter, Option, Argument + +# Globals +from .globals import get_current_context + +# Decorators +from .decorators import pass_context, pass_obj, make_pass_decorator, \ + command, group, argument, option, confirmation_option, \ + password_option, version_option, help_option + +# Types +from .types import ParamType, File, Path, Choice, IntRange, Tuple, \ + DateTime, STRING, INT, FLOAT, BOOL, UUID, UNPROCESSED, FloatRange + +# Utilities +from .utils import echo, get_binary_stream, get_text_stream, open_file, \ + format_filename, get_app_dir, get_os_args + +# Terminal functions +from .termui import prompt, confirm, get_terminal_size, echo_via_pager, \ + progressbar, clear, style, unstyle, secho, edit, launch, getchar, \ + pause + +# Exceptions +from .exceptions import ClickException, UsageError, BadParameter, \ + FileError, Abort, NoSuchOption, BadOptionUsage, BadArgumentUsage, \ + MissingParameter + +# Formatting +from .formatting import HelpFormatter, wrap_text + +# Parsing +from .parser import OptionParser + + +__all__ = [ + # Core classes + 'Context', 'BaseCommand', 'Command', 'MultiCommand', 'Group', + 'CommandCollection', 'Parameter', 'Option', 'Argument', + + # Globals + 'get_current_context', + + # Decorators + 'pass_context', 'pass_obj', 'make_pass_decorator', 'command', 'group', + 'argument', 'option', 'confirmation_option', 'password_option', + 'version_option', 'help_option', + + # Types + 'ParamType', 'File', 'Path', 'Choice', 'IntRange', 'Tuple', + 'DateTime', 'STRING', 'INT', 'FLOAT', 'BOOL', 'UUID', 'UNPROCESSED', + 'FloatRange', + + # Utilities + 'echo', 'get_binary_stream', 'get_text_stream', 'open_file', + 'format_filename', 'get_app_dir', 'get_os_args', + + # Terminal functions + 'prompt', 'confirm', 'get_terminal_size', 'echo_via_pager', + 'progressbar', 'clear', 'style', 'unstyle', 'secho', 'edit', 'launch', + 'getchar', 'pause', + + # Exceptions + 'ClickException', 'UsageError', 'BadParameter', 'FileError', + 'Abort', 'NoSuchOption', 'BadOptionUsage', 'BadArgumentUsage', + 'MissingParameter', + + # Formatting + 'HelpFormatter', 'wrap_text', + + # Parsing + 'OptionParser', +] + + +# Controls if click should emit the warning about the use of unicode +# literals. 
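A minimal command built on the public API re-exported above (the `hello` command is illustrative):

    import click

    @click.command()
    @click.option('--count', default=1, help='Number of greetings.')
    @click.argument('name')
    def hello(count, name):
        """Greet NAME the given number of times."""
        for _ in range(count):
            click.echo('Hello, %s!' % name)

    if __name__ == '__main__':
        hello()

Saved as `hello.py`, this could be invoked as `python hello.py --count 2 Alice` to print the greeting twice.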
+disable_unicode_literals_warning = False + + +__version__ = '7.0' diff --git a/flask/venv/lib/python3.6/site-packages/click/__pycache__/__init__.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/click/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..5fcb78b Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/click/__pycache__/__init__.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/click/__pycache__/_bashcomplete.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/click/__pycache__/_bashcomplete.cpython-36.pyc new file mode 100644 index 0000000..9d64615 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/click/__pycache__/_bashcomplete.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/click/__pycache__/_compat.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/click/__pycache__/_compat.cpython-36.pyc new file mode 100644 index 0000000..1a798c9 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/click/__pycache__/_compat.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/click/__pycache__/_termui_impl.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/click/__pycache__/_termui_impl.cpython-36.pyc new file mode 100644 index 0000000..dd51304 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/click/__pycache__/_termui_impl.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/click/__pycache__/_textwrap.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/click/__pycache__/_textwrap.cpython-36.pyc new file mode 100644 index 0000000..80a6b9d Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/click/__pycache__/_textwrap.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/click/__pycache__/_unicodefun.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/click/__pycache__/_unicodefun.cpython-36.pyc new file mode 100644 index 0000000..4eedd8b Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/click/__pycache__/_unicodefun.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/click/__pycache__/_winconsole.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/click/__pycache__/_winconsole.cpython-36.pyc new file mode 100644 index 0000000..7deb546 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/click/__pycache__/_winconsole.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/click/__pycache__/core.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/click/__pycache__/core.cpython-36.pyc new file mode 100644 index 0000000..6fc7a4f Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/click/__pycache__/core.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/click/__pycache__/decorators.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/click/__pycache__/decorators.cpython-36.pyc new file mode 100644 index 0000000..47baa88 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/click/__pycache__/decorators.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/click/__pycache__/exceptions.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/click/__pycache__/exceptions.cpython-36.pyc new file mode 100644 index 0000000..aec957f Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/click/__pycache__/exceptions.cpython-36.pyc differ diff --git 
a/flask/venv/lib/python3.6/site-packages/click/__pycache__/formatting.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/click/__pycache__/formatting.cpython-36.pyc new file mode 100644 index 0000000..d143a94 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/click/__pycache__/formatting.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/click/__pycache__/globals.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/click/__pycache__/globals.cpython-36.pyc new file mode 100644 index 0000000..06b1b9d Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/click/__pycache__/globals.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/click/__pycache__/parser.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/click/__pycache__/parser.cpython-36.pyc new file mode 100644 index 0000000..3888bf6 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/click/__pycache__/parser.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/click/__pycache__/termui.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/click/__pycache__/termui.cpython-36.pyc new file mode 100644 index 0000000..d33a3da Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/click/__pycache__/termui.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/click/__pycache__/testing.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/click/__pycache__/testing.cpython-36.pyc new file mode 100644 index 0000000..d01b7c8 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/click/__pycache__/testing.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/click/__pycache__/types.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/click/__pycache__/types.cpython-36.pyc new file mode 100644 index 0000000..8af44ca Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/click/__pycache__/types.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/click/__pycache__/utils.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/click/__pycache__/utils.cpython-36.pyc new file mode 100644 index 0000000..d56d92a Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/click/__pycache__/utils.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/click/_bashcomplete.py b/flask/venv/lib/python3.6/site-packages/click/_bashcomplete.py new file mode 100644 index 0000000..a5f1084 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/click/_bashcomplete.py @@ -0,0 +1,293 @@ +import copy +import os +import re + +from .utils import echo +from .parser import split_arg_string +from .core import MultiCommand, Option, Argument +from .types import Choice + +try: + from collections import abc +except ImportError: + import collections as abc + +WORDBREAK = '=' + +# Note, only BASH version 4.4 and later have the nosort option. +COMPLETION_SCRIPT_BASH = ''' +%(complete_func)s() { + local IFS=$'\n' + COMPREPLY=( $( env COMP_WORDS="${COMP_WORDS[*]}" \\ + COMP_CWORD=$COMP_CWORD \\ + %(autocomplete_var)s=complete $1 ) ) + return 0 +} + +%(complete_func)setup() { + local COMPLETION_OPTIONS="" + local BASH_VERSION_ARR=(${BASH_VERSION//./ }) + # Only BASH version 4.4 and later have the nosort option. 
+ if [ ${BASH_VERSION_ARR[0]} -gt 4 ] || ([ ${BASH_VERSION_ARR[0]} -eq 4 ] && [ ${BASH_VERSION_ARR[1]} -ge 4 ]); then + COMPLETION_OPTIONS="-o nosort" + fi + + complete $COMPLETION_OPTIONS -F %(complete_func)s %(script_names)s +} + +%(complete_func)setup +''' + +COMPLETION_SCRIPT_ZSH = ''' +%(complete_func)s() { + local -a completions + local -a completions_with_descriptions + local -a response + response=("${(@f)$( env COMP_WORDS=\"${words[*]}\" \\ + COMP_CWORD=$((CURRENT-1)) \\ + %(autocomplete_var)s=\"complete_zsh\" \\ + %(script_names)s )}") + + for key descr in ${(kv)response}; do + if [[ "$descr" == "_" ]]; then + completions+=("$key") + else + completions_with_descriptions+=("$key":"$descr") + fi + done + + if [ -n "$completions_with_descriptions" ]; then + _describe -V unsorted completions_with_descriptions -U -Q + fi + + if [ -n "$completions" ]; then + compadd -U -V unsorted -Q -a completions + fi + compstate[insert]="automenu" +} + +compdef %(complete_func)s %(script_names)s +''' + +_invalid_ident_char_re = re.compile(r'[^a-zA-Z0-9_]') + + +def get_completion_script(prog_name, complete_var, shell): + cf_name = _invalid_ident_char_re.sub('', prog_name.replace('-', '_')) + script = COMPLETION_SCRIPT_ZSH if shell == 'zsh' else COMPLETION_SCRIPT_BASH + return (script % { + 'complete_func': '_%s_completion' % cf_name, + 'script_names': prog_name, + 'autocomplete_var': complete_var, + }).strip() + ';' + + +def resolve_ctx(cli, prog_name, args): + """ + Parse into a hierarchy of contexts. Contexts are connected through the parent variable. + :param cli: command definition + :param prog_name: the program that is running + :param args: full list of args + :return: the final context/command parsed + """ + ctx = cli.make_context(prog_name, args, resilient_parsing=True) + args = ctx.protected_args + ctx.args + while args: + if isinstance(ctx.command, MultiCommand): + if not ctx.command.chain: + cmd_name, cmd, args = ctx.command.resolve_command(ctx, args) + if cmd is None: + return ctx + ctx = cmd.make_context(cmd_name, args, parent=ctx, + resilient_parsing=True) + args = ctx.protected_args + ctx.args + else: + # Walk chained subcommand contexts saving the last one. + while args: + cmd_name, cmd, args = ctx.command.resolve_command(ctx, args) + if cmd is None: + return ctx + sub_ctx = cmd.make_context(cmd_name, args, parent=ctx, + allow_extra_args=True, + allow_interspersed_args=False, + resilient_parsing=True) + args = sub_ctx.args + ctx = sub_ctx + args = sub_ctx.protected_args + sub_ctx.args + else: + break + return ctx + + +def start_of_option(param_str): + """ + :param param_str: param_str to check + :return: whether or not this is the start of an option declaration (i.e. starts "-" or "--") + """ + return param_str and param_str[:1] == '-' + + +def is_incomplete_option(all_args, cmd_param): + """ + :param all_args: the full original list of args supplied + :param cmd_param: the current command paramter + :return: whether or not the last option declaration (i.e. starts "-" or "--") is incomplete and + corresponds to this cmd_param. 
In other words whether this cmd_param option can still accept + values + """ + if not isinstance(cmd_param, Option): + return False + if cmd_param.is_flag: + return False + last_option = None + for index, arg_str in enumerate(reversed([arg for arg in all_args if arg != WORDBREAK])): + if index + 1 > cmd_param.nargs: + break + if start_of_option(arg_str): + last_option = arg_str + + return True if last_option and last_option in cmd_param.opts else False + + +def is_incomplete_argument(current_params, cmd_param): + """ + :param current_params: the current params and values for this argument as already entered + :param cmd_param: the current command parameter + :return: whether or not the last argument is incomplete and corresponds to this cmd_param. In + other words whether or not the this cmd_param argument can still accept values + """ + if not isinstance(cmd_param, Argument): + return False + current_param_values = current_params[cmd_param.name] + if current_param_values is None: + return True + if cmd_param.nargs == -1: + return True + if isinstance(current_param_values, abc.Iterable) \ + and cmd_param.nargs > 1 and len(current_param_values) < cmd_param.nargs: + return True + return False + + +def get_user_autocompletions(ctx, args, incomplete, cmd_param): + """ + :param ctx: context associated with the parsed command + :param args: full list of args + :param incomplete: the incomplete text to autocomplete + :param cmd_param: command definition + :return: all the possible user-specified completions for the param + """ + results = [] + if isinstance(cmd_param.type, Choice): + # Choices don't support descriptions. + results = [(c, None) + for c in cmd_param.type.choices if str(c).startswith(incomplete)] + elif cmd_param.autocompletion is not None: + dynamic_completions = cmd_param.autocompletion(ctx=ctx, + args=args, + incomplete=incomplete) + results = [c if isinstance(c, tuple) else (c, None) + for c in dynamic_completions] + return results + + +def get_visible_commands_starting_with(ctx, starts_with): + """ + :param ctx: context associated with the parsed command + :starts_with: string that visible commands must start with. + :return: all visible (not hidden) commands that start with starts_with. + """ + for c in ctx.command.list_commands(ctx): + if c.startswith(starts_with): + command = ctx.command.get_command(ctx, c) + if not command.hidden: + yield command + + +def add_subcommand_completions(ctx, incomplete, completions_out): + # Add subcommand completions. 
+ if isinstance(ctx.command, MultiCommand): + completions_out.extend( + [(c.name, c.get_short_help_str()) for c in get_visible_commands_starting_with(ctx, incomplete)]) + + # Walk up the context list and add any other completion possibilities from chained commands + while ctx.parent is not None: + ctx = ctx.parent + if isinstance(ctx.command, MultiCommand) and ctx.command.chain: + remaining_commands = [c for c in get_visible_commands_starting_with(ctx, incomplete) + if c.name not in ctx.protected_args] + completions_out.extend([(c.name, c.get_short_help_str()) for c in remaining_commands]) + + +def get_choices(cli, prog_name, args, incomplete): + """ + :param cli: command definition + :param prog_name: the program that is running + :param args: full list of args + :param incomplete: the incomplete text to autocomplete + :return: all the possible completions for the incomplete + """ + all_args = copy.deepcopy(args) + + ctx = resolve_ctx(cli, prog_name, args) + if ctx is None: + return [] + + # In newer versions of bash long opts with '='s are partitioned, but it's easier to parse + # without the '=' + if start_of_option(incomplete) and WORDBREAK in incomplete: + partition_incomplete = incomplete.partition(WORDBREAK) + all_args.append(partition_incomplete[0]) + incomplete = partition_incomplete[2] + elif incomplete == WORDBREAK: + incomplete = '' + + completions = [] + if start_of_option(incomplete): + # completions for partial options + for param in ctx.command.params: + if isinstance(param, Option) and not param.hidden: + param_opts = [param_opt for param_opt in param.opts + + param.secondary_opts if param_opt not in all_args or param.multiple] + completions.extend([(o, param.help) for o in param_opts if o.startswith(incomplete)]) + return completions + # completion for option values from user supplied values + for param in ctx.command.params: + if is_incomplete_option(all_args, param): + return get_user_autocompletions(ctx, all_args, incomplete, param) + # completion for argument values from user supplied values + for param in ctx.command.params: + if is_incomplete_argument(ctx.params, param): + return get_user_autocompletions(ctx, all_args, incomplete, param) + + add_subcommand_completions(ctx, incomplete, completions) + # Sort before returning so that proper ordering can be enforced in custom types. + return sorted(completions) + + +def do_complete(cli, prog_name, include_descriptions): + cwords = split_arg_string(os.environ['COMP_WORDS']) + cword = int(os.environ['COMP_CWORD']) + args = cwords[1:cword] + try: + incomplete = cwords[cword] + except IndexError: + incomplete = '' + + for item in get_choices(cli, prog_name, args, incomplete): + echo(item[0]) + if include_descriptions: + # ZSH has trouble dealing with empty array parameters when returned from commands, so use a well defined character '_' to indicate no description is present. 
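A sketch of the `autocompletion` callback protocol these helpers drive (the `deploy` command and `complete_envs` callback are illustrative; the activation line assumes an installed `deploy` entry point):

    import click

    def complete_envs(ctx, args, incomplete):
        # called with the partially typed word; may return strings
        # or (value, description) tuples
        return [e for e in ('dev', 'staging', 'prod') if e.startswith(incomplete)]

    @click.command()
    @click.argument('environment', autocompletion=complete_envs)
    def deploy(environment):
        click.echo('deploying to %s' % environment)

    # typical bash activation:  eval "$(_DEPLOY_COMPLETE=source deploy)"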
+ echo(item[1] if item[1] else '_') + + return True + + +def bashcomplete(cli, prog_name, complete_var, complete_instr): + if complete_instr.startswith('source'): + shell = 'zsh' if complete_instr == 'source_zsh' else 'bash' + echo(get_completion_script(prog_name, complete_var, shell)) + return True + elif complete_instr == 'complete' or complete_instr == 'complete_zsh': + return do_complete(cli, prog_name, complete_instr == 'complete_zsh') + return False diff --git a/flask/venv/lib/python3.6/site-packages/click/_compat.py b/flask/venv/lib/python3.6/site-packages/click/_compat.py new file mode 100644 index 0000000..937e230 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/click/_compat.py @@ -0,0 +1,703 @@ +import re +import io +import os +import sys +import codecs +from weakref import WeakKeyDictionary + + +PY2 = sys.version_info[0] == 2 +CYGWIN = sys.platform.startswith('cygwin') +# Determine local App Engine environment, per Google's own suggestion +APP_ENGINE = ('APPENGINE_RUNTIME' in os.environ and + 'Development/' in os.environ['SERVER_SOFTWARE']) +WIN = sys.platform.startswith('win') and not APP_ENGINE +DEFAULT_COLUMNS = 80 + + +_ansi_re = re.compile(r'\033\[((?:\d|;)*)([a-zA-Z])') + + +def get_filesystem_encoding(): + return sys.getfilesystemencoding() or sys.getdefaultencoding() + + +def _make_text_stream(stream, encoding, errors, + force_readable=False, force_writable=False): + if encoding is None: + encoding = get_best_encoding(stream) + if errors is None: + errors = 'replace' + return _NonClosingTextIOWrapper(stream, encoding, errors, + line_buffering=True, + force_readable=force_readable, + force_writable=force_writable) + + +def is_ascii_encoding(encoding): + """Checks if a given encoding is ascii.""" + try: + return codecs.lookup(encoding).name == 'ascii' + except LookupError: + return False + + +def get_best_encoding(stream): + """Returns the default stream encoding if not found.""" + rv = getattr(stream, 'encoding', None) or sys.getdefaultencoding() + if is_ascii_encoding(rv): + return 'utf-8' + return rv + + +class _NonClosingTextIOWrapper(io.TextIOWrapper): + + def __init__(self, stream, encoding, errors, + force_readable=False, force_writable=False, **extra): + self._stream = stream = _FixupStream(stream, force_readable, + force_writable) + io.TextIOWrapper.__init__(self, stream, encoding, errors, **extra) + + # The io module is a place where the Python 3 text behavior + # was forced upon Python 2, so we need to unbreak + # it to look like Python 2. + if PY2: + def write(self, x): + if isinstance(x, str) or is_bytes(x): + try: + self.flush() + except Exception: + pass + return self.buffer.write(str(x)) + return io.TextIOWrapper.write(self, x) + + def writelines(self, lines): + for line in lines: + self.write(line) + + def __del__(self): + try: + self.detach() + except Exception: + pass + + def isatty(self): + # https://bitbucket.org/pypy/pypy/issue/1803 + return self._stream.isatty() + + +class _FixupStream(object): + """The new io interface needs more from streams than streams + traditionally implement. As such, this fix-up code is necessary in + some circumstances. + + The forcing of readable and writable flags are there because some tools + put badly patched objects on sys (one such offender are certain version + of jupyter notebook). 
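A small sketch of the encoding helpers defined above:

    import io
    from click._compat import get_best_encoding, is_ascii_encoding

    assert is_ascii_encoding('ascii')
    assert not is_ascii_encoding('utf-8')

    ascii_stream = io.TextIOWrapper(io.BytesIO(), encoding='ascii')
    assert get_best_encoding(ascii_stream) == 'utf-8'   # ASCII streams are upgraded to UTF-8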
+ """ + + def __init__(self, stream, force_readable=False, force_writable=False): + self._stream = stream + self._force_readable = force_readable + self._force_writable = force_writable + + def __getattr__(self, name): + return getattr(self._stream, name) + + def read1(self, size): + f = getattr(self._stream, 'read1', None) + if f is not None: + return f(size) + # We only dispatch to readline instead of read in Python 2 as we + # do not want cause problems with the different implementation + # of line buffering. + if PY2: + return self._stream.readline(size) + return self._stream.read(size) + + def readable(self): + if self._force_readable: + return True + x = getattr(self._stream, 'readable', None) + if x is not None: + return x() + try: + self._stream.read(0) + except Exception: + return False + return True + + def writable(self): + if self._force_writable: + return True + x = getattr(self._stream, 'writable', None) + if x is not None: + return x() + try: + self._stream.write('') + except Exception: + try: + self._stream.write(b'') + except Exception: + return False + return True + + def seekable(self): + x = getattr(self._stream, 'seekable', None) + if x is not None: + return x() + try: + self._stream.seek(self._stream.tell()) + except Exception: + return False + return True + + +if PY2: + text_type = unicode + bytes = str + raw_input = raw_input + string_types = (str, unicode) + int_types = (int, long) + iteritems = lambda x: x.iteritems() + range_type = xrange + + def is_bytes(x): + return isinstance(x, (buffer, bytearray)) + + _identifier_re = re.compile(r'^[a-zA-Z_][a-zA-Z0-9_]*$') + + # For Windows, we need to force stdout/stdin/stderr to binary if it's + # fetched for that. This obviously is not the most correct way to do + # it as it changes global state. Unfortunately, there does not seem to + # be a clear better way to do it as just reopening the file in binary + # mode does not change anything. + # + # An option would be to do what Python 3 does and to open the file as + # binary only, patch it back to the system, and then use a wrapper + # stream that converts newlines. It's not quite clear what's the + # correct option here. + # + # This code also lives in _winconsole for the fallback to the console + # emulation stream. + # + # There are also Windows environments where the `msvcrt` module is not + # available (which is why we use try-catch instead of the WIN variable + # here), such as the Google App Engine development server on Windows. In + # those cases there is just nothing we can do. 
+ def set_binary_mode(f): + return f + + try: + import msvcrt + except ImportError: + pass + else: + def set_binary_mode(f): + try: + fileno = f.fileno() + except Exception: + pass + else: + msvcrt.setmode(fileno, os.O_BINARY) + return f + + try: + import fcntl + except ImportError: + pass + else: + def set_binary_mode(f): + try: + fileno = f.fileno() + except Exception: + pass + else: + flags = fcntl.fcntl(fileno, fcntl.F_GETFL) + fcntl.fcntl(fileno, fcntl.F_SETFL, flags & ~os.O_NONBLOCK) + return f + + def isidentifier(x): + return _identifier_re.search(x) is not None + + def get_binary_stdin(): + return set_binary_mode(sys.stdin) + + def get_binary_stdout(): + _wrap_std_stream('stdout') + return set_binary_mode(sys.stdout) + + def get_binary_stderr(): + _wrap_std_stream('stderr') + return set_binary_mode(sys.stderr) + + def get_text_stdin(encoding=None, errors=None): + rv = _get_windows_console_stream(sys.stdin, encoding, errors) + if rv is not None: + return rv + return _make_text_stream(sys.stdin, encoding, errors, + force_readable=True) + + def get_text_stdout(encoding=None, errors=None): + _wrap_std_stream('stdout') + rv = _get_windows_console_stream(sys.stdout, encoding, errors) + if rv is not None: + return rv + return _make_text_stream(sys.stdout, encoding, errors, + force_writable=True) + + def get_text_stderr(encoding=None, errors=None): + _wrap_std_stream('stderr') + rv = _get_windows_console_stream(sys.stderr, encoding, errors) + if rv is not None: + return rv + return _make_text_stream(sys.stderr, encoding, errors, + force_writable=True) + + def filename_to_ui(value): + if isinstance(value, bytes): + value = value.decode(get_filesystem_encoding(), 'replace') + return value +else: + import io + text_type = str + raw_input = input + string_types = (str,) + int_types = (int,) + range_type = range + isidentifier = lambda x: x.isidentifier() + iteritems = lambda x: iter(x.items()) + + def is_bytes(x): + return isinstance(x, (bytes, memoryview, bytearray)) + + def _is_binary_reader(stream, default=False): + try: + return isinstance(stream.read(0), bytes) + except Exception: + return default + # This happens in some cases where the stream was already + # closed. In this case, we assume the default. + + def _is_binary_writer(stream, default=False): + try: + stream.write(b'') + except Exception: + try: + stream.write('') + return False + except Exception: + pass + return default + return True + + def _find_binary_reader(stream): + # We need to figure out if the given stream is already binary. + # This can happen because the official docs recommend detaching + # the streams to get binary streams. Some code might do this, so + # we need to deal with this case explicitly. + if _is_binary_reader(stream, False): + return stream + + buf = getattr(stream, 'buffer', None) + + # Same situation here; this time we assume that the buffer is + # actually binary in case it's closed. + if buf is not None and _is_binary_reader(buf, True): + return buf + + def _find_binary_writer(stream): + # We need to figure out if the given stream is already binary. + # This can happen because the official docs recommend detatching + # the streams to get binary streams. Some code might do this, so + # we need to deal with this case explicitly. + if _is_binary_writer(stream, False): + return stream + + buf = getattr(stream, 'buffer', None) + + # Same situation here; this time we assume that the buffer is + # actually binary in case it's closed. 
+ if buf is not None and _is_binary_writer(buf, True): + return buf + + def _stream_is_misconfigured(stream): + """A stream is misconfigured if its encoding is ASCII.""" + # If the stream does not have an encoding set, we assume it's set + # to ASCII. This appears to happen in certain unittest + # environments. It's not quite clear what the correct behavior is + # but this at least will force Click to recover somehow. + return is_ascii_encoding(getattr(stream, 'encoding', None) or 'ascii') + + def _is_compatible_text_stream(stream, encoding, errors): + stream_encoding = getattr(stream, 'encoding', None) + stream_errors = getattr(stream, 'errors', None) + + # Perfect match. + if stream_encoding == encoding and stream_errors == errors: + return True + + # Otherwise, it's only a compatible stream if we did not ask for + # an encoding. + if encoding is None: + return stream_encoding is not None + + return False + + def _force_correct_text_reader(text_reader, encoding, errors, + force_readable=False): + if _is_binary_reader(text_reader, False): + binary_reader = text_reader + else: + # If there is no target encoding set, we need to verify that the + # reader is not actually misconfigured. + if encoding is None and not _stream_is_misconfigured(text_reader): + return text_reader + + if _is_compatible_text_stream(text_reader, encoding, errors): + return text_reader + + # If the reader has no encoding, we try to find the underlying + # binary reader for it. If that fails because the environment is + # misconfigured, we silently go with the same reader because this + # is too common to happen. In that case, mojibake is better than + # exceptions. + binary_reader = _find_binary_reader(text_reader) + if binary_reader is None: + return text_reader + + # At this point, we default the errors to replace instead of strict + # because nobody handles those errors anyways and at this point + # we're so fundamentally fucked that nothing can repair it. + if errors is None: + errors = 'replace' + return _make_text_stream(binary_reader, encoding, errors, + force_readable=force_readable) + + def _force_correct_text_writer(text_writer, encoding, errors, + force_writable=False): + if _is_binary_writer(text_writer, False): + binary_writer = text_writer + else: + # If there is no target encoding set, we need to verify that the + # writer is not actually misconfigured. + if encoding is None and not _stream_is_misconfigured(text_writer): + return text_writer + + if _is_compatible_text_stream(text_writer, encoding, errors): + return text_writer + + # If the writer has no encoding, we try to find the underlying + # binary writer for it. If that fails because the environment is + # misconfigured, we silently go with the same writer because this + # is too common to happen. In that case, mojibake is better than + # exceptions. + binary_writer = _find_binary_writer(text_writer) + if binary_writer is None: + return text_writer + + # At this point, we default the errors to replace instead of strict + # because nobody handles those errors anyways and at this point + # we're so fundamentally fucked that nothing can repair it. 
+ if errors is None: + errors = 'replace' + return _make_text_stream(binary_writer, encoding, errors, + force_writable=force_writable) + + def get_binary_stdin(): + reader = _find_binary_reader(sys.stdin) + if reader is None: + raise RuntimeError('Was not able to determine binary ' + 'stream for sys.stdin.') + return reader + + def get_binary_stdout(): + writer = _find_binary_writer(sys.stdout) + if writer is None: + raise RuntimeError('Was not able to determine binary ' + 'stream for sys.stdout.') + return writer + + def get_binary_stderr(): + writer = _find_binary_writer(sys.stderr) + if writer is None: + raise RuntimeError('Was not able to determine binary ' + 'stream for sys.stderr.') + return writer + + def get_text_stdin(encoding=None, errors=None): + rv = _get_windows_console_stream(sys.stdin, encoding, errors) + if rv is not None: + return rv + return _force_correct_text_reader(sys.stdin, encoding, errors, + force_readable=True) + + def get_text_stdout(encoding=None, errors=None): + rv = _get_windows_console_stream(sys.stdout, encoding, errors) + if rv is not None: + return rv + return _force_correct_text_writer(sys.stdout, encoding, errors, + force_writable=True) + + def get_text_stderr(encoding=None, errors=None): + rv = _get_windows_console_stream(sys.stderr, encoding, errors) + if rv is not None: + return rv + return _force_correct_text_writer(sys.stderr, encoding, errors, + force_writable=True) + + def filename_to_ui(value): + if isinstance(value, bytes): + value = value.decode(get_filesystem_encoding(), 'replace') + else: + value = value.encode('utf-8', 'surrogateescape') \ + .decode('utf-8', 'replace') + return value + + +def get_streerror(e, default=None): + if hasattr(e, 'strerror'): + msg = e.strerror + else: + if default is not None: + msg = default + else: + msg = str(e) + if isinstance(msg, bytes): + msg = msg.decode('utf-8', 'replace') + return msg + + +def open_stream(filename, mode='r', encoding=None, errors='strict', + atomic=False): + # Standard streams first. These are simple because they don't need + # special handling for the atomic flag. It's entirely ignored. + if filename == '-': + if any(m in mode for m in ['w', 'a', 'x']): + if 'b' in mode: + return get_binary_stdout(), False + return get_text_stdout(encoding=encoding, errors=errors), False + if 'b' in mode: + return get_binary_stdin(), False + return get_text_stdin(encoding=encoding, errors=errors), False + + # Non-atomic writes directly go out through the regular open functions. + if not atomic: + if encoding is None: + return open(filename, mode), True + return io.open(filename, mode, encoding=encoding, errors=errors), True + + # Some usability stuff for atomic writes + if 'a' in mode: + raise ValueError( + 'Appending to an existing file is not supported, because that ' + 'would involve an expensive `copy`-operation to a temporary ' + 'file. Open the file in normal `w`-mode and copy explicitly ' + 'if that\'s what you\'re after.' + ) + if 'x' in mode: + raise ValueError('Use the `overwrite`-parameter instead.') + if 'w' not in mode: + raise ValueError('Atomic writes only make sense with `w`-mode.') + + # Atomic writes are more complicated. They work by opening a file + # as a proxy in the same folder and then using the fdopen + # functionality to wrap it in a Python file. Then we wrap it in an + # atomic file that moves the file over on close. 
+ import tempfile + fd, tmp_filename = tempfile.mkstemp(dir=os.path.dirname(filename), + prefix='.__atomic-write') + + if encoding is not None: + f = io.open(fd, mode, encoding=encoding, errors=errors) + else: + f = os.fdopen(fd, mode) + + return _AtomicFile(f, tmp_filename, os.path.realpath(filename)), True + + +# Used in a destructor call, needs extra protection from interpreter cleanup. +if hasattr(os, 'replace'): + _replace = os.replace + _can_replace = True +else: + _replace = os.rename + _can_replace = not WIN + + +class _AtomicFile(object): + + def __init__(self, f, tmp_filename, real_filename): + self._f = f + self._tmp_filename = tmp_filename + self._real_filename = real_filename + self.closed = False + + @property + def name(self): + return self._real_filename + + def close(self, delete=False): + if self.closed: + return + self._f.close() + if not _can_replace: + try: + os.remove(self._real_filename) + except OSError: + pass + _replace(self._tmp_filename, self._real_filename) + self.closed = True + + def __getattr__(self, name): + return getattr(self._f, name) + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, tb): + self.close(delete=exc_type is not None) + + def __repr__(self): + return repr(self._f) + + +auto_wrap_for_ansi = None +colorama = None +get_winterm_size = None + + +def strip_ansi(value): + return _ansi_re.sub('', value) + + +def should_strip_ansi(stream=None, color=None): + if color is None: + if stream is None: + stream = sys.stdin + return not isatty(stream) + return not color + + +# If we're on Windows, we provide transparent integration through +# colorama. This will make ANSI colors through the echo function +# work automatically. +if WIN: + # Windows has a smaller terminal + DEFAULT_COLUMNS = 79 + + from ._winconsole import _get_windows_console_stream, _wrap_std_stream + + def _get_argv_encoding(): + import locale + return locale.getpreferredencoding() + + if PY2: + def raw_input(prompt=''): + sys.stderr.flush() + if prompt: + stdout = _default_text_stdout() + stdout.write(prompt) + stdin = _default_text_stdin() + return stdin.readline().rstrip('\r\n') + + try: + import colorama + except ImportError: + pass + else: + _ansi_stream_wrappers = WeakKeyDictionary() + + def auto_wrap_for_ansi(stream, color=None): + """This function wraps a stream so that calls through colorama + are issued to the win32 console API to recolor on demand. It + also ensures to reset the colors if a write call is interrupted + to not destroy the console afterwards. 
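A sketch of `strip_ansi` and the atomic mode of `open_stream` shown above (the target path is illustrative):

    import os
    import tempfile
    from click._compat import open_stream, strip_ansi

    assert strip_ansi('\x1b[31mred\x1b[0m text') == 'red text'

    target = os.path.join(tempfile.mkdtemp(), 'report.txt')
    f, should_close = open_stream(target, 'w', atomic=True)   # writes to a temp file first
    f.write('all or nothing\n')
    f.close()                                                  # the rename publishes the file
    with open(target) as fh:
        assert fh.read() == 'all or nothing\n'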
+ """ + try: + cached = _ansi_stream_wrappers.get(stream) + except Exception: + cached = None + if cached is not None: + return cached + strip = should_strip_ansi(stream, color) + ansi_wrapper = colorama.AnsiToWin32(stream, strip=strip) + rv = ansi_wrapper.stream + _write = rv.write + + def _safe_write(s): + try: + return _write(s) + except: + ansi_wrapper.reset_all() + raise + + rv.write = _safe_write + try: + _ansi_stream_wrappers[stream] = rv + except Exception: + pass + return rv + + def get_winterm_size(): + win = colorama.win32.GetConsoleScreenBufferInfo( + colorama.win32.STDOUT).srWindow + return win.Right - win.Left, win.Bottom - win.Top +else: + def _get_argv_encoding(): + return getattr(sys.stdin, 'encoding', None) or get_filesystem_encoding() + + _get_windows_console_stream = lambda *x: None + _wrap_std_stream = lambda *x: None + + +def term_len(x): + return len(strip_ansi(x)) + + +def isatty(stream): + try: + return stream.isatty() + except Exception: + return False + + +def _make_cached_stream_func(src_func, wrapper_func): + cache = WeakKeyDictionary() + def func(): + stream = src_func() + try: + rv = cache.get(stream) + except Exception: + rv = None + if rv is not None: + return rv + rv = wrapper_func() + try: + stream = src_func() # In case wrapper_func() modified the stream + cache[stream] = rv + except Exception: + pass + return rv + return func + + +_default_text_stdin = _make_cached_stream_func( + lambda: sys.stdin, get_text_stdin) +_default_text_stdout = _make_cached_stream_func( + lambda: sys.stdout, get_text_stdout) +_default_text_stderr = _make_cached_stream_func( + lambda: sys.stderr, get_text_stderr) + + +binary_streams = { + 'stdin': get_binary_stdin, + 'stdout': get_binary_stdout, + 'stderr': get_binary_stderr, +} + +text_streams = { + 'stdin': get_text_stdin, + 'stdout': get_text_stdout, + 'stderr': get_text_stderr, +} diff --git a/flask/venv/lib/python3.6/site-packages/click/_termui_impl.py b/flask/venv/lib/python3.6/site-packages/click/_termui_impl.py new file mode 100644 index 0000000..00a8e5e --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/click/_termui_impl.py @@ -0,0 +1,621 @@ +# -*- coding: utf-8 -*- +""" +click._termui_impl +~~~~~~~~~~~~~~~~~~ + +This module contains implementations for the termui module. To keep the +import time of Click down, some infrequently used functionality is +placed in this module and only imported as needed. + +:copyright: © 2014 by the Pallets team. +:license: BSD, see LICENSE.rst for more details. 
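A small sketch of `term_len` defined above, which measures printable width rather than raw string length:

    from click._compat import term_len

    styled = '\x1b[32mOK\x1b[0m'
    assert term_len(styled) == 2      # colour codes are excluded from the width
    assert len(styled) == 11          # the raw string still contains the escape sequences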
+""" + +import os +import sys +import time +import math +import contextlib +from ._compat import _default_text_stdout, range_type, PY2, isatty, \ + open_stream, strip_ansi, term_len, get_best_encoding, WIN, int_types, \ + CYGWIN +from .utils import echo +from .exceptions import ClickException + + +if os.name == 'nt': + BEFORE_BAR = '\r' + AFTER_BAR = '\n' +else: + BEFORE_BAR = '\r\033[?25l' + AFTER_BAR = '\033[?25h\n' + + +def _length_hint(obj): + """Returns the length hint of an object.""" + try: + return len(obj) + except (AttributeError, TypeError): + try: + get_hint = type(obj).__length_hint__ + except AttributeError: + return None + try: + hint = get_hint(obj) + except TypeError: + return None + if hint is NotImplemented or \ + not isinstance(hint, int_types) or \ + hint < 0: + return None + return hint + + +class ProgressBar(object): + + def __init__(self, iterable, length=None, fill_char='#', empty_char=' ', + bar_template='%(bar)s', info_sep=' ', show_eta=True, + show_percent=None, show_pos=False, item_show_func=None, + label=None, file=None, color=None, width=30): + self.fill_char = fill_char + self.empty_char = empty_char + self.bar_template = bar_template + self.info_sep = info_sep + self.show_eta = show_eta + self.show_percent = show_percent + self.show_pos = show_pos + self.item_show_func = item_show_func + self.label = label or '' + if file is None: + file = _default_text_stdout() + self.file = file + self.color = color + self.width = width + self.autowidth = width == 0 + + if length is None: + length = _length_hint(iterable) + if iterable is None: + if length is None: + raise TypeError('iterable or length is required') + iterable = range_type(length) + self.iter = iter(iterable) + self.length = length + self.length_known = length is not None + self.pos = 0 + self.avg = [] + self.start = self.last_eta = time.time() + self.eta_known = False + self.finished = False + self.max_width = None + self.entered = False + self.current_item = None + self.is_hidden = not isatty(self.file) + self._last_line = None + self.short_limit = 0.5 + + def __enter__(self): + self.entered = True + self.render_progress() + return self + + def __exit__(self, exc_type, exc_value, tb): + self.render_finish() + + def __iter__(self): + if not self.entered: + raise RuntimeError('You need to use progress bars in a with block.') + self.render_progress() + return self.generator() + + def is_fast(self): + return time.time() - self.start <= self.short_limit + + def render_finish(self): + if self.is_hidden or self.is_fast(): + return + self.file.write(AFTER_BAR) + self.file.flush() + + @property + def pct(self): + if self.finished: + return 1.0 + return min(self.pos / (float(self.length) or 1), 1.0) + + @property + def time_per_iteration(self): + if not self.avg: + return 0.0 + return sum(self.avg) / float(len(self.avg)) + + @property + def eta(self): + if self.length_known and not self.finished: + return self.time_per_iteration * (self.length - self.pos) + return 0.0 + + def format_eta(self): + if self.eta_known: + t = int(self.eta) + seconds = t % 60 + t //= 60 + minutes = t % 60 + t //= 60 + hours = t % 24 + t //= 24 + if t > 0: + days = t + return '%dd %02d:%02d:%02d' % (days, hours, minutes, seconds) + else: + return '%02d:%02d:%02d' % (hours, minutes, seconds) + return '' + + def format_pos(self): + pos = str(self.pos) + if self.length_known: + pos += '/%s' % self.length + return pos + + def format_pct(self): + return ('% 4d%%' % int(self.pct * 100))[1:] + + def format_bar(self): + if self.length_known: + 
bar_length = int(self.pct * self.width) + bar = self.fill_char * bar_length + bar += self.empty_char * (self.width - bar_length) + elif self.finished: + bar = self.fill_char * self.width + else: + bar = list(self.empty_char * (self.width or 1)) + if self.time_per_iteration != 0: + bar[int((math.cos(self.pos * self.time_per_iteration) + / 2.0 + 0.5) * self.width)] = self.fill_char + bar = ''.join(bar) + return bar + + def format_progress_line(self): + show_percent = self.show_percent + + info_bits = [] + if self.length_known and show_percent is None: + show_percent = not self.show_pos + + if self.show_pos: + info_bits.append(self.format_pos()) + if show_percent: + info_bits.append(self.format_pct()) + if self.show_eta and self.eta_known and not self.finished: + info_bits.append(self.format_eta()) + if self.item_show_func is not None: + item_info = self.item_show_func(self.current_item) + if item_info is not None: + info_bits.append(item_info) + + return (self.bar_template % { + 'label': self.label, + 'bar': self.format_bar(), + 'info': self.info_sep.join(info_bits) + }).rstrip() + + def render_progress(self): + from .termui import get_terminal_size + + if self.is_hidden: + return + + buf = [] + # Update width in case the terminal has been resized + if self.autowidth: + old_width = self.width + self.width = 0 + clutter_length = term_len(self.format_progress_line()) + new_width = max(0, get_terminal_size()[0] - clutter_length) + if new_width < old_width: + buf.append(BEFORE_BAR) + buf.append(' ' * self.max_width) + self.max_width = new_width + self.width = new_width + + clear_width = self.width + if self.max_width is not None: + clear_width = self.max_width + + buf.append(BEFORE_BAR) + line = self.format_progress_line() + line_len = term_len(line) + if self.max_width is None or self.max_width < line_len: + self.max_width = line_len + + buf.append(line) + buf.append(' ' * (clear_width - line_len)) + line = ''.join(buf) + # Render the line only if it changed. + + if line != self._last_line and not self.is_fast(): + self._last_line = line + echo(line, file=self.file, color=self.color, nl=False) + self.file.flush() + + def make_step(self, n_steps): + self.pos += n_steps + if self.length_known and self.pos >= self.length: + self.finished = True + + if (time.time() - self.last_eta) < 1.0: + return + + self.last_eta = time.time() + + # self.avg is a rolling list of length <= 7 of steps where steps are + # defined as time elapsed divided by the total progress through + # self.length. + if self.pos: + step = (time.time() - self.start) / self.pos + else: + step = time.time() - self.start + + self.avg = self.avg[-6:] + [step] + + self.eta_known = self.length_known + + def update(self, n_steps): + self.make_step(n_steps) + self.render_progress() + + def finish(self): + self.eta_known = 0 + self.current_item = None + self.finished = True + + def generator(self): + """ + Returns a generator which yields the items added to the bar during + construction, and updates the progress bar *after* the yielded block + returns. 
+ """ + if not self.entered: + raise RuntimeError('You need to use progress bars in a with block.') + + if self.is_hidden: + for rv in self.iter: + yield rv + else: + for rv in self.iter: + self.current_item = rv + yield rv + self.update(1) + self.finish() + self.render_progress() + + +def pager(generator, color=None): + """Decide what method to use for paging through text.""" + stdout = _default_text_stdout() + if not isatty(sys.stdin) or not isatty(stdout): + return _nullpager(stdout, generator, color) + pager_cmd = (os.environ.get('PAGER', None) or '').strip() + if pager_cmd: + if WIN: + return _tempfilepager(generator, pager_cmd, color) + return _pipepager(generator, pager_cmd, color) + if os.environ.get('TERM') in ('dumb', 'emacs'): + return _nullpager(stdout, generator, color) + if WIN or sys.platform.startswith('os2'): + return _tempfilepager(generator, 'more <', color) + if hasattr(os, 'system') and os.system('(less) 2>/dev/null') == 0: + return _pipepager(generator, 'less', color) + + import tempfile + fd, filename = tempfile.mkstemp() + os.close(fd) + try: + if hasattr(os, 'system') and os.system('more "%s"' % filename) == 0: + return _pipepager(generator, 'more', color) + return _nullpager(stdout, generator, color) + finally: + os.unlink(filename) + + +def _pipepager(generator, cmd, color): + """Page through text by feeding it to another program. Invoking a + pager through this might support colors. + """ + import subprocess + env = dict(os.environ) + + # If we're piping to less we might support colors under the + # condition that + cmd_detail = cmd.rsplit('/', 1)[-1].split() + if color is None and cmd_detail[0] == 'less': + less_flags = os.environ.get('LESS', '') + ' '.join(cmd_detail[1:]) + if not less_flags: + env['LESS'] = '-R' + color = True + elif 'r' in less_flags or 'R' in less_flags: + color = True + + c = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE, + env=env) + encoding = get_best_encoding(c.stdin) + try: + for text in generator: + if not color: + text = strip_ansi(text) + + c.stdin.write(text.encode(encoding, 'replace')) + except (IOError, KeyboardInterrupt): + pass + else: + c.stdin.close() + + # Less doesn't respect ^C, but catches it for its own UI purposes (aborting + # search or other commands inside less). + # + # That means when the user hits ^C, the parent process (click) terminates, + # but less is still alive, paging the output and messing up the terminal. + # + # If the user wants to make the pager exit on ^C, they should set + # `LESS='-K'`. It's not our decision to make. + while True: + try: + c.wait() + except KeyboardInterrupt: + pass + else: + break + + +def _tempfilepager(generator, cmd, color): + """Page through text by invoking a program on a temporary file.""" + import tempfile + filename = tempfile.mktemp() + # TODO: This never terminates if the passed generator never terminates. + text = "".join(generator) + if not color: + text = strip_ansi(text) + encoding = get_best_encoding(sys.stdout) + with open_stream(filename, 'wb')[0] as f: + f.write(text.encode(encoding)) + try: + os.system(cmd + ' "' + filename + '"') + finally: + os.unlink(filename) + + +def _nullpager(stream, generator, color): + """Simply print unformatted text. 
This is the ultimate fallback.""" + for text in generator: + if not color: + text = strip_ansi(text) + stream.write(text) + + +class Editor(object): + + def __init__(self, editor=None, env=None, require_save=True, + extension='.txt'): + self.editor = editor + self.env = env + self.require_save = require_save + self.extension = extension + + def get_editor(self): + if self.editor is not None: + return self.editor + for key in 'VISUAL', 'EDITOR': + rv = os.environ.get(key) + if rv: + return rv + if WIN: + return 'notepad' + for editor in 'vim', 'nano': + if os.system('which %s >/dev/null 2>&1' % editor) == 0: + return editor + return 'vi' + + def edit_file(self, filename): + import subprocess + editor = self.get_editor() + if self.env: + environ = os.environ.copy() + environ.update(self.env) + else: + environ = None + try: + c = subprocess.Popen('%s "%s"' % (editor, filename), + env=environ, shell=True) + exit_code = c.wait() + if exit_code != 0: + raise ClickException('%s: Editing failed!' % editor) + except OSError as e: + raise ClickException('%s: Editing failed: %s' % (editor, e)) + + def edit(self, text): + import tempfile + + text = text or '' + if text and not text.endswith('\n'): + text += '\n' + + fd, name = tempfile.mkstemp(prefix='editor-', suffix=self.extension) + try: + if WIN: + encoding = 'utf-8-sig' + text = text.replace('\n', '\r\n') + else: + encoding = 'utf-8' + text = text.encode(encoding) + + f = os.fdopen(fd, 'wb') + f.write(text) + f.close() + timestamp = os.path.getmtime(name) + + self.edit_file(name) + + if self.require_save \ + and os.path.getmtime(name) == timestamp: + return None + + f = open(name, 'rb') + try: + rv = f.read() + finally: + f.close() + return rv.decode('utf-8-sig').replace('\r\n', '\n') + finally: + os.unlink(name) + + +def open_url(url, wait=False, locate=False): + import subprocess + + def _unquote_file(url): + try: + import urllib + except ImportError: + import urllib + if url.startswith('file://'): + url = urllib.unquote(url[7:]) + return url + + if sys.platform == 'darwin': + args = ['open'] + if wait: + args.append('-W') + if locate: + args.append('-R') + args.append(_unquote_file(url)) + null = open('/dev/null', 'w') + try: + return subprocess.Popen(args, stderr=null).wait() + finally: + null.close() + elif WIN: + if locate: + url = _unquote_file(url) + args = 'explorer /select,"%s"' % _unquote_file( + url.replace('"', '')) + else: + args = 'start %s "" "%s"' % ( + wait and '/WAIT' or '', url.replace('"', '')) + return os.system(args) + elif CYGWIN: + if locate: + url = _unquote_file(url) + args = 'cygstart "%s"' % (os.path.dirname(url).replace('"', '')) + else: + args = 'cygstart %s "%s"' % ( + wait and '-w' or '', url.replace('"', '')) + return os.system(args) + + try: + if locate: + url = os.path.dirname(_unquote_file(url)) or '.' + else: + url = _unquote_file(url) + c = subprocess.Popen(['xdg-open', url]) + if wait: + return c.wait() + return 0 + except OSError: + if url.startswith(('http://', 'https://')) and not locate and not wait: + import webbrowser + webbrowser.open(url) + return 0 + return 1 + + +def _translate_ch_to_exc(ch): + if ch == u'\x03': + raise KeyboardInterrupt() + if ch == u'\x04' and not WIN: # Unix-like, Ctrl+D + raise EOFError() + if ch == u'\x1a' and WIN: # Windows, Ctrl+Z + raise EOFError() + + +if WIN: + import msvcrt + + @contextlib.contextmanager + def raw_terminal(): + yield + + def getchar(echo): + # The function `getch` will return a bytes object corresponding to + # the pressed character. 
Since Windows 10 build 1803, it will also + # return \x00 when called a second time after pressing a regular key. + # + # `getwch` does not share this probably-bugged behavior. Moreover, it + # returns a Unicode object by default, which is what we want. + # + # Either of these functions will return \x00 or \xe0 to indicate + # a special key, and you need to call the same function again to get + # the "rest" of the code. The fun part is that \u00e0 is + # "latin small letter a with grave", so if you type that on a French + # keyboard, you _also_ get a \xe0. + # E.g., consider the Up arrow. This returns \xe0 and then \x48. The + # resulting Unicode string reads as "a with grave" + "capital H". + # This is indistinguishable from when the user actually types + # "a with grave" and then "capital H". + # + # When \xe0 is returned, we assume it's part of a special-key sequence + # and call `getwch` again, but that means that when the user types + # the \u00e0 character, `getchar` doesn't return until a second + # character is typed. + # The alternative is returning immediately, but that would mess up + # cross-platform handling of arrow keys and others that start with + # \xe0. Another option is using `getch`, but then we can't reliably + # read non-ASCII characters, because return values of `getch` are + # limited to the current 8-bit codepage. + # + # Anyway, Click doesn't claim to do this Right(tm), and using `getwch` + # is doing the right thing in more situations than with `getch`. + if echo: + func = msvcrt.getwche + else: + func = msvcrt.getwch + + rv = func() + if rv in (u'\x00', u'\xe0'): + # \x00 and \xe0 are control characters that indicate special key, + # see above. + rv += func() + _translate_ch_to_exc(rv) + return rv +else: + import tty + import termios + + @contextlib.contextmanager + def raw_terminal(): + if not isatty(sys.stdin): + f = open('/dev/tty') + fd = f.fileno() + else: + fd = sys.stdin.fileno() + f = None + try: + old_settings = termios.tcgetattr(fd) + try: + tty.setraw(fd) + yield fd + finally: + termios.tcsetattr(fd, termios.TCSADRAIN, old_settings) + sys.stdout.flush() + if f is not None: + f.close() + except termios.error: + pass + + def getchar(echo): + with raw_terminal() as fd: + ch = os.read(fd, 32) + ch = ch.decode(get_best_encoding(sys.stdin), 'replace') + if echo and isatty(sys.stdout): + sys.stdout.write(ch) + _translate_ch_to_exc(ch) + return ch diff --git a/flask/venv/lib/python3.6/site-packages/click/_textwrap.py b/flask/venv/lib/python3.6/site-packages/click/_textwrap.py new file mode 100644 index 0000000..7e77603 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/click/_textwrap.py @@ -0,0 +1,38 @@ +import textwrap +from contextlib import contextmanager + + +class TextWrapper(textwrap.TextWrapper): + + def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width): + space_left = max(width - cur_len, 1) + + if self.break_long_words: + last = reversed_chunks[-1] + cut = last[:space_left] + res = last[space_left:] + cur_line.append(cut) + reversed_chunks[-1] = res + elif not cur_line: + cur_line.append(reversed_chunks.pop()) + + @contextmanager + def extra_indent(self, indent): + old_initial_indent = self.initial_indent + old_subsequent_indent = self.subsequent_indent + self.initial_indent += indent + self.subsequent_indent += indent + try: + yield + finally: + self.initial_indent = old_initial_indent + self.subsequent_indent = old_subsequent_indent + + def indent_only(self, text): + rv = [] + for idx, line in enumerate(text.splitlines()): 
+ indent = self.initial_indent + if idx > 0: + indent = self.subsequent_indent + rv.append(indent + line) + return '\n'.join(rv) diff --git a/flask/venv/lib/python3.6/site-packages/click/_unicodefun.py b/flask/venv/lib/python3.6/site-packages/click/_unicodefun.py new file mode 100644 index 0000000..620edff --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/click/_unicodefun.py @@ -0,0 +1,125 @@ +import os +import sys +import codecs + +from ._compat import PY2 + + +# If someone wants to vendor click, we want to ensure the +# correct package is discovered. Ideally we could use a +# relative import here but unfortunately Python does not +# support that. +click = sys.modules[__name__.rsplit('.', 1)[0]] + + +def _find_unicode_literals_frame(): + import __future__ + if not hasattr(sys, '_getframe'): # not all Python implementations have it + return 0 + frm = sys._getframe(1) + idx = 1 + while frm is not None: + if frm.f_globals.get('__name__', '').startswith('click.'): + frm = frm.f_back + idx += 1 + elif frm.f_code.co_flags & __future__.unicode_literals.compiler_flag: + return idx + else: + break + return 0 + + +def _check_for_unicode_literals(): + if not __debug__: + return + if not PY2 or click.disable_unicode_literals_warning: + return + bad_frame = _find_unicode_literals_frame() + if bad_frame <= 0: + return + from warnings import warn + warn(Warning('Click detected the use of the unicode_literals ' + '__future__ import. This is heavily discouraged ' + 'because it can introduce subtle bugs in your ' + 'code. You should instead use explicit u"" literals ' + 'for your unicode strings. For more information see ' + 'https://click.palletsprojects.com/python3/'), + stacklevel=bad_frame) + + +def _verify_python3_env(): + """Ensures that the environment is good for unicode on Python 3.""" + if PY2: + return + try: + import locale + fs_enc = codecs.lookup(locale.getpreferredencoding()).name + except Exception: + fs_enc = 'ascii' + if fs_enc != 'ascii': + return + + extra = '' + if os.name == 'posix': + import subprocess + try: + rv = subprocess.Popen(['locale', '-a'], stdout=subprocess.PIPE, + stderr=subprocess.PIPE).communicate()[0] + except OSError: + rv = b'' + good_locales = set() + has_c_utf8 = False + + # Make sure we're operating on text here. + if isinstance(rv, bytes): + rv = rv.decode('ascii', 'replace') + + for line in rv.splitlines(): + locale = line.strip() + if locale.lower().endswith(('.utf-8', '.utf8')): + good_locales.add(locale) + if locale.lower() in ('c.utf8', 'c.utf-8'): + has_c_utf8 = True + + extra += '\n\n' + if not good_locales: + extra += ( + 'Additional information: on this system no suitable UTF-8\n' + 'locales were discovered. This most likely requires resolving\n' + 'by reconfiguring the locale system.' + ) + elif has_c_utf8: + extra += ( + 'This system supports the C.UTF-8 locale which is recommended.\n' + 'You might be able to resolve your issue by exporting the\n' + 'following environment variables:\n\n' + ' export LC_ALL=C.UTF-8\n' + ' export LANG=C.UTF-8' + ) + else: + extra += ( + 'This system lists a couple of UTF-8 supporting locales that\n' + 'you can pick from. 
The following suitable locales were\n' + 'discovered: %s' + ) % ', '.join(sorted(good_locales)) + + bad_locale = None + for locale in os.environ.get('LC_ALL'), os.environ.get('LANG'): + if locale and locale.lower().endswith(('.utf-8', '.utf8')): + bad_locale = locale + if locale is not None: + break + if bad_locale is not None: + extra += ( + '\n\nClick discovered that you exported a UTF-8 locale\n' + 'but the locale system could not pick up from it because\n' + 'it does not exist. The exported locale is "%s" but it\n' + 'is not supported' + ) % bad_locale + + raise RuntimeError( + 'Click will abort further execution because Python 3 was' + ' configured to use ASCII as encoding for the environment.' + ' Consult https://click.palletsprojects.com/en/7.x/python3/ for' + ' mitigation steps.' + extra + ) diff --git a/flask/venv/lib/python3.6/site-packages/click/_winconsole.py b/flask/venv/lib/python3.6/site-packages/click/_winconsole.py new file mode 100644 index 0000000..bbb080d --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/click/_winconsole.py @@ -0,0 +1,307 @@ +# -*- coding: utf-8 -*- +# This module is based on the excellent work by Adam Bartoš who +# provided a lot of what went into the implementation here in +# the discussion to issue1602 in the Python bug tracker. +# +# There are some general differences in regards to how this works +# compared to the original patches as we do not need to patch +# the entire interpreter but just work in our little world of +# echo and prmopt. + +import io +import os +import sys +import zlib +import time +import ctypes +import msvcrt +from ._compat import _NonClosingTextIOWrapper, text_type, PY2 +from ctypes import byref, POINTER, c_int, c_char, c_char_p, \ + c_void_p, py_object, c_ssize_t, c_ulong, windll, WINFUNCTYPE +try: + from ctypes import pythonapi + PyObject_GetBuffer = pythonapi.PyObject_GetBuffer + PyBuffer_Release = pythonapi.PyBuffer_Release +except ImportError: + pythonapi = None +from ctypes.wintypes import LPWSTR, LPCWSTR + + +c_ssize_p = POINTER(c_ssize_t) + +kernel32 = windll.kernel32 +GetStdHandle = kernel32.GetStdHandle +ReadConsoleW = kernel32.ReadConsoleW +WriteConsoleW = kernel32.WriteConsoleW +GetLastError = kernel32.GetLastError +GetCommandLineW = WINFUNCTYPE(LPWSTR)( + ('GetCommandLineW', windll.kernel32)) +CommandLineToArgvW = WINFUNCTYPE( + POINTER(LPWSTR), LPCWSTR, POINTER(c_int))( + ('CommandLineToArgvW', windll.shell32)) + + +STDIN_HANDLE = GetStdHandle(-10) +STDOUT_HANDLE = GetStdHandle(-11) +STDERR_HANDLE = GetStdHandle(-12) + + +PyBUF_SIMPLE = 0 +PyBUF_WRITABLE = 1 + +ERROR_SUCCESS = 0 +ERROR_NOT_ENOUGH_MEMORY = 8 +ERROR_OPERATION_ABORTED = 995 + +STDIN_FILENO = 0 +STDOUT_FILENO = 1 +STDERR_FILENO = 2 + +EOF = b'\x1a' +MAX_BYTES_WRITTEN = 32767 + + +class Py_buffer(ctypes.Structure): + _fields_ = [ + ('buf', c_void_p), + ('obj', py_object), + ('len', c_ssize_t), + ('itemsize', c_ssize_t), + ('readonly', c_int), + ('ndim', c_int), + ('format', c_char_p), + ('shape', c_ssize_p), + ('strides', c_ssize_p), + ('suboffsets', c_ssize_p), + ('internal', c_void_p) + ] + + if PY2: + _fields_.insert(-1, ('smalltable', c_ssize_t * 2)) + + +# On PyPy we cannot get buffers so our ability to operate here is +# serverly limited. 
+if pythonapi is None: + get_buffer = None +else: + def get_buffer(obj, writable=False): + buf = Py_buffer() + flags = PyBUF_WRITABLE if writable else PyBUF_SIMPLE + PyObject_GetBuffer(py_object(obj), byref(buf), flags) + try: + buffer_type = c_char * buf.len + return buffer_type.from_address(buf.buf) + finally: + PyBuffer_Release(byref(buf)) + + +class _WindowsConsoleRawIOBase(io.RawIOBase): + + def __init__(self, handle): + self.handle = handle + + def isatty(self): + io.RawIOBase.isatty(self) + return True + + +class _WindowsConsoleReader(_WindowsConsoleRawIOBase): + + def readable(self): + return True + + def readinto(self, b): + bytes_to_be_read = len(b) + if not bytes_to_be_read: + return 0 + elif bytes_to_be_read % 2: + raise ValueError('cannot read odd number of bytes from ' + 'UTF-16-LE encoded console') + + buffer = get_buffer(b, writable=True) + code_units_to_be_read = bytes_to_be_read // 2 + code_units_read = c_ulong() + + rv = ReadConsoleW(self.handle, buffer, code_units_to_be_read, + byref(code_units_read), None) + if GetLastError() == ERROR_OPERATION_ABORTED: + # wait for KeyboardInterrupt + time.sleep(0.1) + if not rv: + raise OSError('Windows error: %s' % GetLastError()) + + if buffer[0] == EOF: + return 0 + return 2 * code_units_read.value + + +class _WindowsConsoleWriter(_WindowsConsoleRawIOBase): + + def writable(self): + return True + + @staticmethod + def _get_error_message(errno): + if errno == ERROR_SUCCESS: + return 'ERROR_SUCCESS' + elif errno == ERROR_NOT_ENOUGH_MEMORY: + return 'ERROR_NOT_ENOUGH_MEMORY' + return 'Windows error %s' % errno + + def write(self, b): + bytes_to_be_written = len(b) + buf = get_buffer(b) + code_units_to_be_written = min(bytes_to_be_written, + MAX_BYTES_WRITTEN) // 2 + code_units_written = c_ulong() + + WriteConsoleW(self.handle, buf, code_units_to_be_written, + byref(code_units_written), None) + bytes_written = 2 * code_units_written.value + + if bytes_written == 0 and bytes_to_be_written > 0: + raise OSError(self._get_error_message(GetLastError())) + return bytes_written + + +class ConsoleStream(object): + + def __init__(self, text_stream, byte_stream): + self._text_stream = text_stream + self.buffer = byte_stream + + @property + def name(self): + return self.buffer.name + + def write(self, x): + if isinstance(x, text_type): + return self._text_stream.write(x) + try: + self.flush() + except Exception: + pass + return self.buffer.write(x) + + def writelines(self, lines): + for line in lines: + self.write(line) + + def __getattr__(self, name): + return getattr(self._text_stream, name) + + def isatty(self): + return self.buffer.isatty() + + def __repr__(self): + return '' % ( + self.name, + self.encoding, + ) + + +class WindowsChunkedWriter(object): + """ + Wraps a stream (such as stdout), acting as a transparent proxy for all + attribute access apart from method 'write()' which we wrap to write in + limited chunks due to a Windows limitation on binary console streams. + """ + def __init__(self, wrapped): + # double-underscore everything to prevent clashes with names of + # attributes on the wrapped stream object. 
+ self.__wrapped = wrapped + + def __getattr__(self, name): + return getattr(self.__wrapped, name) + + def write(self, text): + total_to_write = len(text) + written = 0 + + while written < total_to_write: + to_write = min(total_to_write - written, MAX_BYTES_WRITTEN) + self.__wrapped.write(text[written:written+to_write]) + written += to_write + + +_wrapped_std_streams = set() + + +def _wrap_std_stream(name): + # Python 2 & Windows 7 and below + if PY2 and sys.getwindowsversion()[:2] <= (6, 1) and name not in _wrapped_std_streams: + setattr(sys, name, WindowsChunkedWriter(getattr(sys, name))) + _wrapped_std_streams.add(name) + + +def _get_text_stdin(buffer_stream): + text_stream = _NonClosingTextIOWrapper( + io.BufferedReader(_WindowsConsoleReader(STDIN_HANDLE)), + 'utf-16-le', 'strict', line_buffering=True) + return ConsoleStream(text_stream, buffer_stream) + + +def _get_text_stdout(buffer_stream): + text_stream = _NonClosingTextIOWrapper( + io.BufferedWriter(_WindowsConsoleWriter(STDOUT_HANDLE)), + 'utf-16-le', 'strict', line_buffering=True) + return ConsoleStream(text_stream, buffer_stream) + + +def _get_text_stderr(buffer_stream): + text_stream = _NonClosingTextIOWrapper( + io.BufferedWriter(_WindowsConsoleWriter(STDERR_HANDLE)), + 'utf-16-le', 'strict', line_buffering=True) + return ConsoleStream(text_stream, buffer_stream) + + +if PY2: + def _hash_py_argv(): + return zlib.crc32('\x00'.join(sys.argv[1:])) + + _initial_argv_hash = _hash_py_argv() + + def _get_windows_argv(): + argc = c_int(0) + argv_unicode = CommandLineToArgvW(GetCommandLineW(), byref(argc)) + argv = [argv_unicode[i] for i in range(0, argc.value)] + + if not hasattr(sys, 'frozen'): + argv = argv[1:] + while len(argv) > 0: + arg = argv[0] + if not arg.startswith('-') or arg == '-': + break + argv = argv[1:] + if arg.startswith(('-c', '-m')): + break + + return argv[1:] + + +_stream_factories = { + 0: _get_text_stdin, + 1: _get_text_stdout, + 2: _get_text_stderr, +} + + +def _get_windows_console_stream(f, encoding, errors): + if get_buffer is not None and \ + encoding in ('utf-16-le', None) \ + and errors in ('strict', None) and \ + hasattr(f, 'isatty') and f.isatty(): + func = _stream_factories.get(f.fileno()) + if func is not None: + if not PY2: + f = getattr(f, 'buffer', None) + if f is None: + return None + else: + # If we are on Python 2 we need to set the stream that we + # deal with to binary mode as otherwise the exercise if a + # bit moot. The same problems apply as for + # get_binary_stdin and friends from _compat. 
+ msvcrt.setmode(f.fileno(), os.O_BINARY) + return func(f) diff --git a/flask/venv/lib/python3.6/site-packages/click/core.py b/flask/venv/lib/python3.6/site-packages/click/core.py new file mode 100644 index 0000000..7a1e342 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/click/core.py @@ -0,0 +1,1856 @@ +import errno +import inspect +import os +import sys +from contextlib import contextmanager +from itertools import repeat +from functools import update_wrapper + +from .types import convert_type, IntRange, BOOL +from .utils import PacifyFlushWrapper, make_str, make_default_short_help, \ + echo, get_os_args +from .exceptions import ClickException, UsageError, BadParameter, Abort, \ + MissingParameter, Exit +from .termui import prompt, confirm, style +from .formatting import HelpFormatter, join_options +from .parser import OptionParser, split_opt +from .globals import push_context, pop_context + +from ._compat import PY2, isidentifier, iteritems, string_types +from ._unicodefun import _check_for_unicode_literals, _verify_python3_env + + +_missing = object() + + +SUBCOMMAND_METAVAR = 'COMMAND [ARGS]...' +SUBCOMMANDS_METAVAR = 'COMMAND1 [ARGS]... [COMMAND2 [ARGS]...]...' + +DEPRECATED_HELP_NOTICE = ' (DEPRECATED)' +DEPRECATED_INVOKE_NOTICE = 'DeprecationWarning: ' + \ + 'The command %(name)s is deprecated.' + + +def _maybe_show_deprecated_notice(cmd): + if cmd.deprecated: + echo(style(DEPRECATED_INVOKE_NOTICE % {'name': cmd.name}, fg='red'), err=True) + + +def fast_exit(code): + """Exit without garbage collection, this speeds up exit by about 10ms for + things like bash completion. + """ + sys.stdout.flush() + sys.stderr.flush() + os._exit(code) + + +def _bashcomplete(cmd, prog_name, complete_var=None): + """Internal handler for the bash completion support.""" + if complete_var is None: + complete_var = '_%s_COMPLETE' % (prog_name.replace('-', '_')).upper() + complete_instr = os.environ.get(complete_var) + if not complete_instr: + return + + from ._bashcomplete import bashcomplete + if bashcomplete(cmd, prog_name, complete_var, complete_instr): + fast_exit(1) + + +def _check_multicommand(base_command, cmd_name, cmd, register=False): + if not base_command.chain or not isinstance(cmd, MultiCommand): + return + if register: + hint = 'It is not possible to add multi commands as children to ' \ + 'another multi command that is in chain mode' + else: + hint = 'Found a multi command as subcommand to a multi command ' \ + 'that is in chain mode. This is not supported' + raise RuntimeError('%s. Command "%s" is set to chain and "%s" was ' + 'added as subcommand but it in itself is a ' + 'multi command. ("%s" is a %s within a chained ' + '%s named "%s").' % ( + hint, base_command.name, cmd_name, + cmd_name, cmd.__class__.__name__, + base_command.__class__.__name__, + base_command.name)) + + +def batch(iterable, batch_size): + return list(zip(*repeat(iter(iterable), batch_size))) + + +def invoke_param_callback(callback, ctx, param, value): + code = getattr(callback, '__code__', None) + args = getattr(code, 'co_argcount', 3) + + if args < 3: + # This will become a warning in Click 3.0: + from warnings import warn + warn(Warning('Invoked legacy parameter callback "%s". The new ' + 'signature for such callbacks starting with ' + 'click 2.0 is (ctx, param, value).' 
+ % callback), stacklevel=3) + return callback(ctx, value) + return callback(ctx, param, value) + + +@contextmanager +def augment_usage_errors(ctx, param=None): + """Context manager that attaches extra information to exceptions that + fly. + """ + try: + yield + except BadParameter as e: + if e.ctx is None: + e.ctx = ctx + if param is not None and e.param is None: + e.param = param + raise + except UsageError as e: + if e.ctx is None: + e.ctx = ctx + raise + + +def iter_params_for_processing(invocation_order, declaration_order): + """Given a sequence of parameters in the order as should be considered + for processing and an iterable of parameters that exist, this returns + a list in the correct order as they should be processed. + """ + def sort_key(item): + try: + idx = invocation_order.index(item) + except ValueError: + idx = float('inf') + return (not item.is_eager, idx) + + return sorted(declaration_order, key=sort_key) + + +class Context(object): + """The context is a special internal object that holds state relevant + for the script execution at every single level. It's normally invisible + to commands unless they opt-in to getting access to it. + + The context is useful as it can pass internal objects around and can + control special execution features such as reading data from + environment variables. + + A context can be used as context manager in which case it will call + :meth:`close` on teardown. + + .. versionadded:: 2.0 + Added the `resilient_parsing`, `help_option_names`, + `token_normalize_func` parameters. + + .. versionadded:: 3.0 + Added the `allow_extra_args` and `allow_interspersed_args` + parameters. + + .. versionadded:: 4.0 + Added the `color`, `ignore_unknown_options`, and + `max_content_width` parameters. + + :param command: the command class for this context. + :param parent: the parent context. + :param info_name: the info name for this invocation. Generally this + is the most descriptive name for the script or + command. For the toplevel script it is usually + the name of the script, for commands below it it's + the name of the script. + :param obj: an arbitrary object of user data. + :param auto_envvar_prefix: the prefix to use for automatic environment + variables. If this is `None` then reading + from environment variables is disabled. This + does not affect manually set environment + variables which are always read. + :param default_map: a dictionary (like object) with default values + for parameters. + :param terminal_width: the width of the terminal. The default is + inherit from parent context. If no context + defines the terminal width then auto + detection will be applied. + :param max_content_width: the maximum width for content rendered by + Click (this currently only affects help + pages). This defaults to 80 characters if + not overridden. In other words: even if the + terminal is larger than that, Click will not + format things wider than 80 characters by + default. In addition to that, formatters might + add some safety mapping on the right. + :param resilient_parsing: if this flag is enabled then Click will + parse without any interactivity or callback + invocation. Default values will also be + ignored. This is useful for implementing + things such as completion support. + :param allow_extra_args: if this is set to `True` then extra arguments + at the end will not raise an error and will be + kept on the context. The default is to inherit + from the command. 
+ :param allow_interspersed_args: if this is set to `False` then options + and arguments cannot be mixed. The + default is to inherit from the command. + :param ignore_unknown_options: instructs click to ignore options it does + not know and keeps them for later + processing. + :param help_option_names: optionally a list of strings that define how + the default help parameter is named. The + default is ``['--help']``. + :param token_normalize_func: an optional function that is used to + normalize tokens (options, choices, + etc.). This for instance can be used to + implement case insensitive behavior. + :param color: controls if the terminal supports ANSI colors or not. The + default is autodetection. This is only needed if ANSI + codes are used in texts that Click prints which is by + default not the case. This for instance would affect + help output. + """ + + def __init__(self, command, parent=None, info_name=None, obj=None, + auto_envvar_prefix=None, default_map=None, + terminal_width=None, max_content_width=None, + resilient_parsing=False, allow_extra_args=None, + allow_interspersed_args=None, + ignore_unknown_options=None, help_option_names=None, + token_normalize_func=None, color=None): + #: the parent context or `None` if none exists. + self.parent = parent + #: the :class:`Command` for this context. + self.command = command + #: the descriptive information name + self.info_name = info_name + #: the parsed parameters except if the value is hidden in which + #: case it's not remembered. + self.params = {} + #: the leftover arguments. + self.args = [] + #: protected arguments. These are arguments that are prepended + #: to `args` when certain parsing scenarios are encountered but + #: must be never propagated to another arguments. This is used + #: to implement nested parsing. + self.protected_args = [] + if obj is None and parent is not None: + obj = parent.obj + #: the user object stored. + self.obj = obj + self._meta = getattr(parent, 'meta', {}) + + #: A dictionary (-like object) with defaults for parameters. + if default_map is None \ + and parent is not None \ + and parent.default_map is not None: + default_map = parent.default_map.get(info_name) + self.default_map = default_map + + #: This flag indicates if a subcommand is going to be executed. A + #: group callback can use this information to figure out if it's + #: being executed directly or because the execution flow passes + #: onwards to a subcommand. By default it's None, but it can be + #: the name of the subcommand to execute. + #: + #: If chaining is enabled this will be set to ``'*'`` in case + #: any commands are executed. It is however not possible to + #: figure out which ones. If you require this knowledge you + #: should use a :func:`resultcallback`. + self.invoked_subcommand = None + + if terminal_width is None and parent is not None: + terminal_width = parent.terminal_width + #: The width of the terminal (None is autodetection). + self.terminal_width = terminal_width + + if max_content_width is None and parent is not None: + max_content_width = parent.max_content_width + #: The maximum width of formatted content (None implies a sensible + #: default which is 80 for most things). + self.max_content_width = max_content_width + + if allow_extra_args is None: + allow_extra_args = command.allow_extra_args + #: Indicates if the context allows extra args or if it should + #: fail on parsing. + #: + #: .. 
versionadded:: 3.0 + self.allow_extra_args = allow_extra_args + + if allow_interspersed_args is None: + allow_interspersed_args = command.allow_interspersed_args + #: Indicates if the context allows mixing of arguments and + #: options or not. + #: + #: .. versionadded:: 3.0 + self.allow_interspersed_args = allow_interspersed_args + + if ignore_unknown_options is None: + ignore_unknown_options = command.ignore_unknown_options + #: Instructs click to ignore options that a command does not + #: understand and will store it on the context for later + #: processing. This is primarily useful for situations where you + #: want to call into external programs. Generally this pattern is + #: strongly discouraged because it's not possibly to losslessly + #: forward all arguments. + #: + #: .. versionadded:: 4.0 + self.ignore_unknown_options = ignore_unknown_options + + if help_option_names is None: + if parent is not None: + help_option_names = parent.help_option_names + else: + help_option_names = ['--help'] + + #: The names for the help options. + self.help_option_names = help_option_names + + if token_normalize_func is None and parent is not None: + token_normalize_func = parent.token_normalize_func + + #: An optional normalization function for tokens. This is + #: options, choices, commands etc. + self.token_normalize_func = token_normalize_func + + #: Indicates if resilient parsing is enabled. In that case Click + #: will do its best to not cause any failures and default values + #: will be ignored. Useful for completion. + self.resilient_parsing = resilient_parsing + + # If there is no envvar prefix yet, but the parent has one and + # the command on this level has a name, we can expand the envvar + # prefix automatically. + if auto_envvar_prefix is None: + if parent is not None \ + and parent.auto_envvar_prefix is not None and \ + self.info_name is not None: + auto_envvar_prefix = '%s_%s' % (parent.auto_envvar_prefix, + self.info_name.upper()) + else: + auto_envvar_prefix = auto_envvar_prefix.upper() + self.auto_envvar_prefix = auto_envvar_prefix + + if color is None and parent is not None: + color = parent.color + + #: Controls if styling output is wanted or not. + self.color = color + + self._close_callbacks = [] + self._depth = 0 + + def __enter__(self): + self._depth += 1 + push_context(self) + return self + + def __exit__(self, exc_type, exc_value, tb): + self._depth -= 1 + if self._depth == 0: + self.close() + pop_context() + + @contextmanager + def scope(self, cleanup=True): + """This helper method can be used with the context object to promote + it to the current thread local (see :func:`get_current_context`). + The default behavior of this is to invoke the cleanup functions which + can be disabled by setting `cleanup` to `False`. The cleanup + functions are typically used for things such as closing file handles. + + If the cleanup is intended the context object can also be directly + used as a context manager. + + Example usage:: + + with ctx.scope(): + assert get_current_context() is ctx + + This is equivalent:: + + with ctx: + assert get_current_context() is ctx + + .. versionadded:: 5.0 + + :param cleanup: controls if the cleanup functions should be run or + not. The default is to run these functions. In + some situations the context only wants to be + temporarily pushed in which case this can be disabled. + Nested pushes automatically defer the cleanup. 
+ """ + if not cleanup: + self._depth += 1 + try: + with self as rv: + yield rv + finally: + if not cleanup: + self._depth -= 1 + + @property + def meta(self): + """This is a dictionary which is shared with all the contexts + that are nested. It exists so that click utilities can store some + state here if they need to. It is however the responsibility of + that code to manage this dictionary well. + + The keys are supposed to be unique dotted strings. For instance + module paths are a good choice for it. What is stored in there is + irrelevant for the operation of click. However what is important is + that code that places data here adheres to the general semantics of + the system. + + Example usage:: + + LANG_KEY = __name__ + '.lang' + + def set_language(value): + ctx = get_current_context() + ctx.meta[LANG_KEY] = value + + def get_language(): + return get_current_context().meta.get(LANG_KEY, 'en_US') + + .. versionadded:: 5.0 + """ + return self._meta + + def make_formatter(self): + """Creates the formatter for the help and usage output.""" + return HelpFormatter(width=self.terminal_width, + max_width=self.max_content_width) + + def call_on_close(self, f): + """This decorator remembers a function as callback that should be + executed when the context tears down. This is most useful to bind + resource handling to the script execution. For instance, file objects + opened by the :class:`File` type will register their close callbacks + here. + + :param f: the function to execute on teardown. + """ + self._close_callbacks.append(f) + return f + + def close(self): + """Invokes all close callbacks.""" + for cb in self._close_callbacks: + cb() + self._close_callbacks = [] + + @property + def command_path(self): + """The computed command path. This is used for the ``usage`` + information on the help page. It's automatically created by + combining the info names of the chain of contexts to the root. + """ + rv = '' + if self.info_name is not None: + rv = self.info_name + if self.parent is not None: + rv = self.parent.command_path + ' ' + rv + return rv.lstrip() + + def find_root(self): + """Finds the outermost context.""" + node = self + while node.parent is not None: + node = node.parent + return node + + def find_object(self, object_type): + """Finds the closest object of a given type.""" + node = self + while node is not None: + if isinstance(node.obj, object_type): + return node.obj + node = node.parent + + def ensure_object(self, object_type): + """Like :meth:`find_object` but sets the innermost object to a + new instance of `object_type` if it does not exist. + """ + rv = self.find_object(object_type) + if rv is None: + self.obj = rv = object_type() + return rv + + def lookup_default(self, name): + """Looks up the default for a parameter name. This by default + looks into the :attr:`default_map` if available. + """ + if self.default_map is not None: + rv = self.default_map.get(name) + if callable(rv): + rv = rv() + return rv + + def fail(self, message): + """Aborts the execution of the program with a specific error + message. + + :param message: the error message to fail with. + """ + raise UsageError(message, self) + + def abort(self): + """Aborts the script.""" + raise Abort() + + def exit(self, code=0): + """Exits the application with a given exit code.""" + raise Exit(code) + + def get_usage(self): + """Helper method to get formatted usage string for the current + context and command. 
+ """ + return self.command.get_usage(self) + + def get_help(self): + """Helper method to get formatted help page for the current + context and command. + """ + return self.command.get_help(self) + + def invoke(*args, **kwargs): + """Invokes a command callback in exactly the way it expects. There + are two ways to invoke this method: + + 1. the first argument can be a callback and all other arguments and + keyword arguments are forwarded directly to the function. + 2. the first argument is a click command object. In that case all + arguments are forwarded as well but proper click parameters + (options and click arguments) must be keyword arguments and Click + will fill in defaults. + + Note that before Click 3.2 keyword arguments were not properly filled + in against the intention of this code and no context was created. For + more information about this change and why it was done in a bugfix + release see :ref:`upgrade-to-3.2`. + """ + self, callback = args[:2] + ctx = self + + # It's also possible to invoke another command which might or + # might not have a callback. In that case we also fill + # in defaults and make a new context for this command. + if isinstance(callback, Command): + other_cmd = callback + callback = other_cmd.callback + ctx = Context(other_cmd, info_name=other_cmd.name, parent=self) + if callback is None: + raise TypeError('The given command does not have a ' + 'callback that can be invoked.') + + for param in other_cmd.params: + if param.name not in kwargs and param.expose_value: + kwargs[param.name] = param.get_default(ctx) + + args = args[2:] + with augment_usage_errors(self): + with ctx: + return callback(*args, **kwargs) + + def forward(*args, **kwargs): + """Similar to :meth:`invoke` but fills in default keyword + arguments from the current context if the other command expects + it. This cannot invoke callbacks directly, only other commands. + """ + self, cmd = args[:2] + + # It's also possible to invoke another command which might or + # might not have a callback. + if not isinstance(cmd, Command): + raise TypeError('Callback is not a command.') + + for param in self.params: + if param not in kwargs: + kwargs[param] = self.params[param] + + return self.invoke(cmd, **kwargs) + + +class BaseCommand(object): + """The base command implements the minimal API contract of commands. + Most code will never use this as it does not implement a lot of useful + functionality but it can act as the direct subclass of alternative + parsing methods that do not depend on the Click parser. + + For instance, this can be used to bridge Click and other systems like + argparse or docopt. + + Because base commands do not implement a lot of the API that other + parts of Click take for granted, they are not supported for all + operations. For instance, they cannot be used with the decorators + usually and they have no built-in callback system. + + .. versionchanged:: 2.0 + Added the `context_settings` parameter. + + :param name: the name of the command to use unless a group overrides it. + :param context_settings: an optional dictionary with defaults that are + passed to the context object. + """ + #: the default for the :attr:`Context.allow_extra_args` flag. + allow_extra_args = False + #: the default for the :attr:`Context.allow_interspersed_args` flag. + allow_interspersed_args = True + #: the default for the :attr:`Context.ignore_unknown_options` flag. + ignore_unknown_options = False + + def __init__(self, name, context_settings=None): + #: the name the command thinks it has. 
Upon registering a command + #: on a :class:`Group` the group will default the command name + #: with this information. You should instead use the + #: :class:`Context`\'s :attr:`~Context.info_name` attribute. + self.name = name + if context_settings is None: + context_settings = {} + #: an optional dictionary with defaults passed to the context. + self.context_settings = context_settings + + def get_usage(self, ctx): + raise NotImplementedError('Base commands cannot get usage') + + def get_help(self, ctx): + raise NotImplementedError('Base commands cannot get help') + + def make_context(self, info_name, args, parent=None, **extra): + """This function when given an info name and arguments will kick + off the parsing and create a new :class:`Context`. It does not + invoke the actual command callback though. + + :param info_name: the info name for this invokation. Generally this + is the most descriptive name for the script or + command. For the toplevel script it's usually + the name of the script, for commands below it it's + the name of the script. + :param args: the arguments to parse as list of strings. + :param parent: the parent context if available. + :param extra: extra keyword arguments forwarded to the context + constructor. + """ + for key, value in iteritems(self.context_settings): + if key not in extra: + extra[key] = value + ctx = Context(self, info_name=info_name, parent=parent, **extra) + with ctx.scope(cleanup=False): + self.parse_args(ctx, args) + return ctx + + def parse_args(self, ctx, args): + """Given a context and a list of arguments this creates the parser + and parses the arguments, then modifies the context as necessary. + This is automatically invoked by :meth:`make_context`. + """ + raise NotImplementedError('Base commands do not know how to parse ' + 'arguments.') + + def invoke(self, ctx): + """Given a context, this invokes the command. The default + implementation is raising a not implemented error. + """ + raise NotImplementedError('Base commands are not invokable by default') + + def main(self, args=None, prog_name=None, complete_var=None, + standalone_mode=True, **extra): + """This is the way to invoke a script with all the bells and + whistles as a command line application. This will always terminate + the application after a call. If this is not wanted, ``SystemExit`` + needs to be caught. + + This method is also available by directly calling the instance of + a :class:`Command`. + + .. versionadded:: 3.0 + Added the `standalone_mode` flag to control the standalone mode. + + :param args: the arguments that should be used for parsing. If not + provided, ``sys.argv[1:]`` is used. + :param prog_name: the program name that should be used. By default + the program name is constructed by taking the file + name from ``sys.argv[0]``. + :param complete_var: the environment variable that controls the + bash completion support. The default is + ``"__COMPLETE"`` with prog_name in + uppercase. + :param standalone_mode: the default behavior is to invoke the script + in standalone mode. Click will then + handle exceptions and convert them into + error messages and the function will never + return but shut down the interpreter. If + this is set to `False` they will be + propagated to the caller and the return + value of this function is the return value + of :meth:`invoke`. + :param extra: extra keyword arguments are forwarded to the context + constructor. See :class:`Context` for more information. 
+ """ + # If we are in Python 3, we will verify that the environment is + # sane at this point or reject further execution to avoid a + # broken script. + if not PY2: + _verify_python3_env() + else: + _check_for_unicode_literals() + + if args is None: + args = get_os_args() + else: + args = list(args) + + if prog_name is None: + prog_name = make_str(os.path.basename( + sys.argv and sys.argv[0] or __file__)) + + # Hook for the Bash completion. This only activates if the Bash + # completion is actually enabled, otherwise this is quite a fast + # noop. + _bashcomplete(self, prog_name, complete_var) + + try: + try: + with self.make_context(prog_name, args, **extra) as ctx: + rv = self.invoke(ctx) + if not standalone_mode: + return rv + # it's not safe to `ctx.exit(rv)` here! + # note that `rv` may actually contain data like "1" which + # has obvious effects + # more subtle case: `rv=[None, None]` can come out of + # chained commands which all returned `None` -- so it's not + # even always obvious that `rv` indicates success/failure + # by its truthiness/falsiness + ctx.exit() + except (EOFError, KeyboardInterrupt): + echo(file=sys.stderr) + raise Abort() + except ClickException as e: + if not standalone_mode: + raise + e.show() + sys.exit(e.exit_code) + except IOError as e: + if e.errno == errno.EPIPE: + sys.stdout = PacifyFlushWrapper(sys.stdout) + sys.stderr = PacifyFlushWrapper(sys.stderr) + sys.exit(1) + else: + raise + except Exit as e: + if standalone_mode: + sys.exit(e.exit_code) + else: + # in non-standalone mode, return the exit code + # note that this is only reached if `self.invoke` above raises + # an Exit explicitly -- thus bypassing the check there which + # would return its result + # the results of non-standalone execution may therefore be + # somewhat ambiguous: if there are codepaths which lead to + # `ctx.exit(1)` and to `return 1`, the caller won't be able to + # tell the difference between the two + return e.exit_code + except Abort: + if not standalone_mode: + raise + echo('Aborted!', file=sys.stderr) + sys.exit(1) + + def __call__(self, *args, **kwargs): + """Alias for :meth:`main`.""" + return self.main(*args, **kwargs) + + +class Command(BaseCommand): + """Commands are the basic building block of command line interfaces in + Click. A basic command handles command line parsing and might dispatch + more parsing to commands nested below it. + + .. versionchanged:: 2.0 + Added the `context_settings` parameter. + + :param name: the name of the command to use unless a group overrides it. + :param context_settings: an optional dictionary with defaults that are + passed to the context object. + :param callback: the callback to invoke. This is optional. + :param params: the parameters to register with this command. This can + be either :class:`Option` or :class:`Argument` objects. + :param help: the help string to use for this command. + :param epilog: like the help string but it's printed at the end of the + help page after everything else. + :param short_help: the short help to use for this command. This is + shown on the command listing of the parent command. + :param add_help_option: by default each command registers a ``--help`` + option. This can be disabled by this parameter. + :param hidden: hide this command from help outputs. + + :param deprecated: issues a message indicating that + the command is deprecated. 
+ """ + + def __init__(self, name, context_settings=None, callback=None, + params=None, help=None, epilog=None, short_help=None, + options_metavar='[OPTIONS]', add_help_option=True, + hidden=False, deprecated=False): + BaseCommand.__init__(self, name, context_settings) + #: the callback to execute when the command fires. This might be + #: `None` in which case nothing happens. + self.callback = callback + #: the list of parameters for this command in the order they + #: should show up in the help page and execute. Eager parameters + #: will automatically be handled before non eager ones. + self.params = params or [] + # if a form feed (page break) is found in the help text, truncate help + # text to the content preceding the first form feed + if help and '\f' in help: + help = help.split('\f', 1)[0] + self.help = help + self.epilog = epilog + self.options_metavar = options_metavar + self.short_help = short_help + self.add_help_option = add_help_option + self.hidden = hidden + self.deprecated = deprecated + + def get_usage(self, ctx): + formatter = ctx.make_formatter() + self.format_usage(ctx, formatter) + return formatter.getvalue().rstrip('\n') + + def get_params(self, ctx): + rv = self.params + help_option = self.get_help_option(ctx) + if help_option is not None: + rv = rv + [help_option] + return rv + + def format_usage(self, ctx, formatter): + """Writes the usage line into the formatter.""" + pieces = self.collect_usage_pieces(ctx) + formatter.write_usage(ctx.command_path, ' '.join(pieces)) + + def collect_usage_pieces(self, ctx): + """Returns all the pieces that go into the usage line and returns + it as a list of strings. + """ + rv = [self.options_metavar] + for param in self.get_params(ctx): + rv.extend(param.get_usage_pieces(ctx)) + return rv + + def get_help_option_names(self, ctx): + """Returns the names for the help option.""" + all_names = set(ctx.help_option_names) + for param in self.params: + all_names.difference_update(param.opts) + all_names.difference_update(param.secondary_opts) + return all_names + + def get_help_option(self, ctx): + """Returns the help option object.""" + help_options = self.get_help_option_names(ctx) + if not help_options or not self.add_help_option: + return + + def show_help(ctx, param, value): + if value and not ctx.resilient_parsing: + echo(ctx.get_help(), color=ctx.color) + ctx.exit() + return Option(help_options, is_flag=True, + is_eager=True, expose_value=False, + callback=show_help, + help='Show this message and exit.') + + def make_parser(self, ctx): + """Creates the underlying option parser for this command.""" + parser = OptionParser(ctx) + for param in self.get_params(ctx): + param.add_to_parser(parser, ctx) + return parser + + def get_help(self, ctx): + """Formats the help into a string and returns it. This creates a + formatter and will call into the following formatting methods: + """ + formatter = ctx.make_formatter() + self.format_help(ctx, formatter) + return formatter.getvalue().rstrip('\n') + + def get_short_help_str(self, limit=45): + """Gets short help for the command or makes it by shortening the long help string.""" + return self.short_help or self.help and make_default_short_help(self.help, limit) or '' + + def format_help(self, ctx, formatter): + """Writes the help into the formatter if it exists. 
+ + This calls into the following methods: + + - :meth:`format_usage` + - :meth:`format_help_text` + - :meth:`format_options` + - :meth:`format_epilog` + """ + self.format_usage(ctx, formatter) + self.format_help_text(ctx, formatter) + self.format_options(ctx, formatter) + self.format_epilog(ctx, formatter) + + def format_help_text(self, ctx, formatter): + """Writes the help text to the formatter if it exists.""" + if self.help: + formatter.write_paragraph() + with formatter.indentation(): + help_text = self.help + if self.deprecated: + help_text += DEPRECATED_HELP_NOTICE + formatter.write_text(help_text) + elif self.deprecated: + formatter.write_paragraph() + with formatter.indentation(): + formatter.write_text(DEPRECATED_HELP_NOTICE) + + def format_options(self, ctx, formatter): + """Writes all the options into the formatter if they exist.""" + opts = [] + for param in self.get_params(ctx): + rv = param.get_help_record(ctx) + if rv is not None: + opts.append(rv) + + if opts: + with formatter.section('Options'): + formatter.write_dl(opts) + + def format_epilog(self, ctx, formatter): + """Writes the epilog into the formatter if it exists.""" + if self.epilog: + formatter.write_paragraph() + with formatter.indentation(): + formatter.write_text(self.epilog) + + def parse_args(self, ctx, args): + parser = self.make_parser(ctx) + opts, args, param_order = parser.parse_args(args=args) + + for param in iter_params_for_processing( + param_order, self.get_params(ctx)): + value, args = param.handle_parse_result(ctx, opts, args) + + if args and not ctx.allow_extra_args and not ctx.resilient_parsing: + ctx.fail('Got unexpected extra argument%s (%s)' + % (len(args) != 1 and 's' or '', + ' '.join(map(make_str, args)))) + + ctx.args = args + return args + + def invoke(self, ctx): + """Given a context, this invokes the attached callback (if it exists) + in the right way. + """ + _maybe_show_deprecated_notice(self) + if self.callback is not None: + return ctx.invoke(self.callback, **ctx.params) + + +class MultiCommand(Command): + """A multi command is the basic implementation of a command that + dispatches to subcommands. The most common version is the + :class:`Group`. + + :param invoke_without_command: this controls how the multi command itself + is invoked. By default it's only invoked + if a subcommand is provided. + :param no_args_is_help: this controls what happens if no arguments are + provided. This option is enabled by default if + `invoke_without_command` is disabled or disabled + if it's enabled. If enabled this will add + ``--help`` as argument if no arguments are + passed. + :param subcommand_metavar: the string that is used in the documentation + to indicate the subcommand place. + :param chain: if this is set to `True` chaining of multiple subcommands + is enabled. This restricts the form of commands in that + they cannot have optional arguments but it allows + multiple commands to be chained together. + :param result_callback: the result callback to attach to this multi + command. 
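A short sketch of the chaining behaviour described above, assuming only the standard top-level click import (the group and subcommand names are illustrative):

    import click

    @click.group(chain=True)
    def cli():
        """Subcommands can be chained: `cli lint test`."""

    @cli.command()
    def lint():
        click.echo('linting')

    @cli.command()
    def test():
        click.echo('testing')

    if __name__ == '__main__':
        cli()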
+ """ + allow_extra_args = True + allow_interspersed_args = False + + def __init__(self, name=None, invoke_without_command=False, + no_args_is_help=None, subcommand_metavar=None, + chain=False, result_callback=None, **attrs): + Command.__init__(self, name, **attrs) + if no_args_is_help is None: + no_args_is_help = not invoke_without_command + self.no_args_is_help = no_args_is_help + self.invoke_without_command = invoke_without_command + if subcommand_metavar is None: + if chain: + subcommand_metavar = SUBCOMMANDS_METAVAR + else: + subcommand_metavar = SUBCOMMAND_METAVAR + self.subcommand_metavar = subcommand_metavar + self.chain = chain + #: The result callback that is stored. This can be set or + #: overridden with the :func:`resultcallback` decorator. + self.result_callback = result_callback + + if self.chain: + for param in self.params: + if isinstance(param, Argument) and not param.required: + raise RuntimeError('Multi commands in chain mode cannot ' + 'have optional arguments.') + + def collect_usage_pieces(self, ctx): + rv = Command.collect_usage_pieces(self, ctx) + rv.append(self.subcommand_metavar) + return rv + + def format_options(self, ctx, formatter): + Command.format_options(self, ctx, formatter) + self.format_commands(ctx, formatter) + + def resultcallback(self, replace=False): + """Adds a result callback to the chain command. By default if a + result callback is already registered this will chain them but + this can be disabled with the `replace` parameter. The result + callback is invoked with the return value of the subcommand + (or the list of return values from all subcommands if chaining + is enabled) as well as the parameters as they would be passed + to the main callback. + + Example:: + + @click.group() + @click.option('-i', '--input', default=23) + def cli(input): + return 42 + + @cli.resultcallback() + def process_result(result, input): + return result + input + + .. versionadded:: 3.0 + + :param replace: if set to `True` an already existing result + callback will be removed. + """ + def decorator(f): + old_callback = self.result_callback + if old_callback is None or replace: + self.result_callback = f + return f + def function(__value, *args, **kwargs): + return f(old_callback(__value, *args, **kwargs), + *args, **kwargs) + self.result_callback = rv = update_wrapper(function, f) + return rv + return decorator + + def format_commands(self, ctx, formatter): + """Extra format methods for multi methods that adds all the commands + after the options. + """ + commands = [] + for subcommand in self.list_commands(ctx): + cmd = self.get_command(ctx, subcommand) + # What is this, the tool lied about a command. 
Ignore it + if cmd is None: + continue + if cmd.hidden: + continue + + commands.append((subcommand, cmd)) + + # allow for 3 times the default spacing + if len(commands): + limit = formatter.width - 6 - max(len(cmd[0]) for cmd in commands) + + rows = [] + for subcommand, cmd in commands: + help = cmd.get_short_help_str(limit) + rows.append((subcommand, help)) + + if rows: + with formatter.section('Commands'): + formatter.write_dl(rows) + + def parse_args(self, ctx, args): + if not args and self.no_args_is_help and not ctx.resilient_parsing: + echo(ctx.get_help(), color=ctx.color) + ctx.exit() + + rest = Command.parse_args(self, ctx, args) + if self.chain: + ctx.protected_args = rest + ctx.args = [] + elif rest: + ctx.protected_args, ctx.args = rest[:1], rest[1:] + + return ctx.args + + def invoke(self, ctx): + def _process_result(value): + if self.result_callback is not None: + value = ctx.invoke(self.result_callback, value, + **ctx.params) + return value + + if not ctx.protected_args: + # If we are invoked without command the chain flag controls + # how this happens. If we are not in chain mode, the return + # value here is the return value of the command. + # If however we are in chain mode, the return value is the + # return value of the result processor invoked with an empty + # list (which means that no subcommand actually was executed). + if self.invoke_without_command: + if not self.chain: + return Command.invoke(self, ctx) + with ctx: + Command.invoke(self, ctx) + return _process_result([]) + ctx.fail('Missing command.') + + # Fetch args back out + args = ctx.protected_args + ctx.args + ctx.args = [] + ctx.protected_args = [] + + # If we're not in chain mode, we only allow the invocation of a + # single command but we also inform the current context about the + # name of the command to invoke. + if not self.chain: + # Make sure the context is entered so we do not clean up + # resources until the result processor has worked. + with ctx: + cmd_name, cmd, args = self.resolve_command(ctx, args) + ctx.invoked_subcommand = cmd_name + Command.invoke(self, ctx) + sub_ctx = cmd.make_context(cmd_name, args, parent=ctx) + with sub_ctx: + return _process_result(sub_ctx.command.invoke(sub_ctx)) + + # In chain mode we create the contexts step by step, but after the + # base command has been invoked. Because at that point we do not + # know the subcommands yet, the invoked subcommand attribute is + # set to ``*`` to inform the command that subcommands are executed + # but nothing else. + with ctx: + ctx.invoked_subcommand = args and '*' or None + Command.invoke(self, ctx) + + # Otherwise we make every single context and invoke them in a + # chain. In that case the return value to the result processor + # is the list of all invoked subcommand's results. + contexts = [] + while args: + cmd_name, cmd, args = self.resolve_command(ctx, args) + sub_ctx = cmd.make_context(cmd_name, args, parent=ctx, + allow_extra_args=True, + allow_interspersed_args=False) + contexts.append(sub_ctx) + args, sub_ctx.args = sub_ctx.args, [] + + rv = [] + for sub_ctx in contexts: + with sub_ctx: + rv.append(sub_ctx.command.invoke(sub_ctx)) + return _process_result(rv) + + def resolve_command(self, ctx, args): + cmd_name = make_str(args[0]) + original_cmd_name = cmd_name + + # Get the command + cmd = self.get_command(ctx, cmd_name) + + # If we can't find the command but there is a normalization + # function available, we try with that one. 
+ if cmd is None and ctx.token_normalize_func is not None: + cmd_name = ctx.token_normalize_func(cmd_name) + cmd = self.get_command(ctx, cmd_name) + + # If we don't find the command we want to show an error message + # to the user that it was not provided. However, there is + # something else we should do: if the first argument looks like + # an option we want to kick off parsing again for arguments to + # resolve things like --help which now should go to the main + # place. + if cmd is None and not ctx.resilient_parsing: + if split_opt(cmd_name)[0]: + self.parse_args(ctx, ctx.args) + ctx.fail('No such command "%s".' % original_cmd_name) + + return cmd_name, cmd, args[1:] + + def get_command(self, ctx, cmd_name): + """Given a context and a command name, this returns a + :class:`Command` object if it exists or returns `None`. + """ + raise NotImplementedError() + + def list_commands(self, ctx): + """Returns a list of subcommand names in the order they should + appear. + """ + return [] + + +class Group(MultiCommand): + """A group allows a command to have subcommands attached. This is the + most common way to implement nesting in Click. + + :param commands: a dictionary of commands. + """ + + def __init__(self, name=None, commands=None, **attrs): + MultiCommand.__init__(self, name, **attrs) + #: the registered subcommands by their exported names. + self.commands = commands or {} + + def add_command(self, cmd, name=None): + """Registers another :class:`Command` with this group. If the name + is not provided, the name of the command is used. + """ + name = name or cmd.name + if name is None: + raise TypeError('Command has no name.') + _check_multicommand(self, name, cmd, register=True) + self.commands[name] = cmd + + def command(self, *args, **kwargs): + """A shortcut decorator for declaring and attaching a command to + the group. This takes the same arguments as :func:`command` but + immediately registers the created command with this instance by + calling into :meth:`add_command`. + """ + def decorator(f): + cmd = command(*args, **kwargs)(f) + self.add_command(cmd) + return cmd + return decorator + + def group(self, *args, **kwargs): + """A shortcut decorator for declaring and attaching a group to + the group. This takes the same arguments as :func:`group` but + immediately registers the created command with this instance by + calling into :meth:`add_command`. + """ + def decorator(f): + cmd = group(*args, **kwargs)(f) + self.add_command(cmd) + return cmd + return decorator + + def get_command(self, ctx, cmd_name): + return self.commands.get(cmd_name) + + def list_commands(self, ctx): + return sorted(self.commands) + + +class CommandCollection(MultiCommand): + """A command collection is a multi command that merges multiple multi + commands together into one. This is a straightforward implementation + that accepts a list of different multi commands as sources and + provides all the commands for each of them. + """ + + def __init__(self, name=None, sources=None, **attrs): + MultiCommand.__init__(self, name, **attrs) + #: The list of registered multi commands. 
+ self.sources = sources or [] + + def add_source(self, multi_cmd): + """Adds a new multi command to the chain dispatcher.""" + self.sources.append(multi_cmd) + + def get_command(self, ctx, cmd_name): + for source in self.sources: + rv = source.get_command(ctx, cmd_name) + if rv is not None: + if self.chain: + _check_multicommand(self, cmd_name, rv) + return rv + + def list_commands(self, ctx): + rv = set() + for source in self.sources: + rv.update(source.list_commands(ctx)) + return sorted(rv) + + +class Parameter(object): + r"""A parameter to a command comes in two versions: they are either + :class:`Option`\s or :class:`Argument`\s. Other subclasses are currently + not supported by design as some of the internals for parsing are + intentionally not finalized. + + Some settings are supported by both options and arguments. + + .. versionchanged:: 2.0 + Changed signature for parameter callback to also be passed the + parameter. In Click 2.0, the old callback format will still work, + but it will raise a warning to give you change to migrate the + code easier. + + :param param_decls: the parameter declarations for this option or + argument. This is a list of flags or argument + names. + :param type: the type that should be used. Either a :class:`ParamType` + or a Python type. The later is converted into the former + automatically if supported. + :param required: controls if this is optional or not. + :param default: the default value if omitted. This can also be a callable, + in which case it's invoked when the default is needed + without any arguments. + :param callback: a callback that should be executed after the parameter + was matched. This is called as ``fn(ctx, param, + value)`` and needs to return the value. Before Click + 2.0, the signature was ``(ctx, value)``. + :param nargs: the number of arguments to match. If not ``1`` the return + value is a tuple instead of single value. The default for + nargs is ``1`` (except if the type is a tuple, then it's + the arity of the tuple). + :param metavar: how the value is represented in the help page. + :param expose_value: if this is `True` then the value is passed onwards + to the command callback and stored on the context, + otherwise it's skipped. + :param is_eager: eager values are processed before non eager ones. This + should not be set for arguments or it will inverse the + order of processing. + :param envvar: a string or list of strings that are environment variables + that should be checked. + """ + param_type_name = 'parameter' + + def __init__(self, param_decls=None, type=None, required=False, + default=None, callback=None, nargs=None, metavar=None, + expose_value=True, is_eager=False, envvar=None, + autocompletion=None): + self.name, self.opts, self.secondary_opts = \ + self._parse_decls(param_decls or (), expose_value) + + self.type = convert_type(type, default) + + # Default nargs to what the type tells us if we have that + # information available. + if nargs is None: + if self.type.is_composite: + nargs = self.type.arity + else: + nargs = 1 + + self.required = required + self.callback = callback + self.nargs = nargs + self.multiple = False + self.expose_value = expose_value + self.default = default + self.is_eager = is_eager + self.metavar = metavar + self.envvar = envvar + self.autocompletion = autocompletion + + @property + def human_readable_name(self): + """Returns the human readable name of this parameter. This is the + same as the name for options, but the metavar for arguments. 
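The Parameter settings documented above (type, default, callback, nargs, envvar) are normally exercised through the option and argument decorators rather than by instantiating Parameter directly; a hedged sketch with illustrative names:

    import click

    def check_count(ctx, param, value):
        # Callback signature since Click 2.0: (ctx, param, value).
        if value < 1:
            raise click.BadParameter('must be at least 1')
        return value

    @click.command()
    @click.option('--count', type=int, default=1, envvar='DEMO_COUNT',
                  callback=check_count, show_default=True)
    @click.argument('name')
    def repeat(count, name):
        for _ in range(count):
            click.echo(name)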
+ """ + return self.name + + def make_metavar(self): + if self.metavar is not None: + return self.metavar + metavar = self.type.get_metavar(self) + if metavar is None: + metavar = self.type.name.upper() + if self.nargs != 1: + metavar += '...' + return metavar + + def get_default(self, ctx): + """Given a context variable this calculates the default value.""" + # Otherwise go with the regular default. + if callable(self.default): + rv = self.default() + else: + rv = self.default + return self.type_cast_value(ctx, rv) + + def add_to_parser(self, parser, ctx): + pass + + def consume_value(self, ctx, opts): + value = opts.get(self.name) + if value is None: + value = self.value_from_envvar(ctx) + if value is None: + value = ctx.lookup_default(self.name) + return value + + def type_cast_value(self, ctx, value): + """Given a value this runs it properly through the type system. + This automatically handles things like `nargs` and `multiple` as + well as composite types. + """ + if self.type.is_composite: + if self.nargs <= 1: + raise TypeError('Attempted to invoke composite type ' + 'but nargs has been set to %s. This is ' + 'not supported; nargs needs to be set to ' + 'a fixed value > 1.' % self.nargs) + if self.multiple: + return tuple(self.type(x or (), self, ctx) for x in value or ()) + return self.type(value or (), self, ctx) + + def _convert(value, level): + if level == 0: + return self.type(value, self, ctx) + return tuple(_convert(x, level - 1) for x in value or ()) + return _convert(value, (self.nargs != 1) + bool(self.multiple)) + + def process_value(self, ctx, value): + """Given a value and context this runs the logic to convert the + value as necessary. + """ + # If the value we were given is None we do nothing. This way + # code that calls this can easily figure out if something was + # not provided. Otherwise it would be converted into an empty + # tuple for multiple invocations which is inconvenient. 
+ if value is not None: + return self.type_cast_value(ctx, value) + + def value_is_missing(self, value): + if value is None: + return True + if (self.nargs != 1 or self.multiple) and value == (): + return True + return False + + def full_process_value(self, ctx, value): + value = self.process_value(ctx, value) + + if value is None and not ctx.resilient_parsing: + value = self.get_default(ctx) + + if self.required and self.value_is_missing(value): + raise MissingParameter(ctx=ctx, param=self) + + return value + + def resolve_envvar_value(self, ctx): + if self.envvar is None: + return + if isinstance(self.envvar, (tuple, list)): + for envvar in self.envvar: + rv = os.environ.get(envvar) + if rv is not None: + return rv + else: + return os.environ.get(self.envvar) + + def value_from_envvar(self, ctx): + rv = self.resolve_envvar_value(ctx) + if rv is not None and self.nargs != 1: + rv = self.type.split_envvar_value(rv) + return rv + + def handle_parse_result(self, ctx, opts, args): + with augment_usage_errors(ctx, param=self): + value = self.consume_value(ctx, opts) + try: + value = self.full_process_value(ctx, value) + except Exception: + if not ctx.resilient_parsing: + raise + value = None + if self.callback is not None: + try: + value = invoke_param_callback( + self.callback, ctx, self, value) + except Exception: + if not ctx.resilient_parsing: + raise + + if self.expose_value: + ctx.params[self.name] = value + return value, args + + def get_help_record(self, ctx): + pass + + def get_usage_pieces(self, ctx): + return [] + + def get_error_hint(self, ctx): + """Get a stringified version of the param for use in error messages to + indicate which param caused the error. + """ + hint_list = self.opts or [self.human_readable_name] + return ' / '.join('"%s"' % x for x in hint_list) + + +class Option(Parameter): + """Options are usually optional values on the command line and + have some extra features that arguments don't have. + + All other parameters are passed onwards to the parameter constructor. + + :param show_default: controls if the default value should be shown on the + help page. Normally, defaults are not shown. If this + value is a string, it shows the string instead of the + value. This is particularly useful for dynamic options. + :param show_envvar: controls if an environment variable should be shown on + the help page. Normally, environment variables + are not shown. + :param prompt: if set to `True` or a non empty string then the user will be + prompted for input. If set to `True` the prompt will be the + option name capitalized. + :param confirmation_prompt: if set then the value will need to be confirmed + if it was prompted for. + :param hide_input: if this is `True` then the input on the prompt will be + hidden from the user. This is useful for password + input. + :param is_flag: forces this option to act as a flag. The default is + auto detection. + :param flag_value: which value should be used for this flag if it's + enabled. This is set to a boolean automatically if + the option string contains a slash to mark two options. + :param multiple: if this is set to `True` then the argument is accepted + multiple times and recorded. This is similar to ``nargs`` + in how it works but supports arbitrary number of + arguments. + :param count: this flag makes an option increment an integer. + :param allow_from_autoenv: if this is enabled then the value of this + parameter will be pulled from an environment + variable in case a prefix is defined on the + context. 
+ :param help: the help string. + :param hidden: hide this option from help outputs. + """ + param_type_name = 'option' + + def __init__(self, param_decls=None, show_default=False, + prompt=False, confirmation_prompt=False, + hide_input=False, is_flag=None, flag_value=None, + multiple=False, count=False, allow_from_autoenv=True, + type=None, help=None, hidden=False, show_choices=True, + show_envvar=False, **attrs): + default_is_missing = attrs.get('default', _missing) is _missing + Parameter.__init__(self, param_decls, type=type, **attrs) + + if prompt is True: + prompt_text = self.name.replace('_', ' ').capitalize() + elif prompt is False: + prompt_text = None + else: + prompt_text = prompt + self.prompt = prompt_text + self.confirmation_prompt = confirmation_prompt + self.hide_input = hide_input + self.hidden = hidden + + # Flags + if is_flag is None: + if flag_value is not None: + is_flag = True + else: + is_flag = bool(self.secondary_opts) + if is_flag and default_is_missing: + self.default = False + if flag_value is None: + flag_value = not self.default + self.is_flag = is_flag + self.flag_value = flag_value + if self.is_flag and isinstance(self.flag_value, bool) \ + and type is None: + self.type = BOOL + self.is_bool_flag = True + else: + self.is_bool_flag = False + + # Counting + self.count = count + if count: + if type is None: + self.type = IntRange(min=0) + if default_is_missing: + self.default = 0 + + self.multiple = multiple + self.allow_from_autoenv = allow_from_autoenv + self.help = help + self.show_default = show_default + self.show_choices = show_choices + self.show_envvar = show_envvar + + # Sanity check for stuff we don't support + if __debug__: + if self.nargs < 0: + raise TypeError('Options cannot have nargs < 0') + if self.prompt and self.is_flag and not self.is_bool_flag: + raise TypeError('Cannot prompt for flags that are not bools.') + if not self.is_bool_flag and self.secondary_opts: + raise TypeError('Got secondary option for non boolean flag.') + if self.is_bool_flag and self.hide_input \ + and self.prompt is not None: + raise TypeError('Hidden input does not work with boolean ' + 'flag prompts.') + if self.count: + if self.multiple: + raise TypeError('Options cannot be multiple and count ' + 'at the same time.') + elif self.is_flag: + raise TypeError('Options cannot be count and flags at ' + 'the same time.') + + def _parse_decls(self, decls, expose_value): + opts = [] + secondary_opts = [] + name = None + possible_names = [] + + for decl in decls: + if isidentifier(decl): + if name is not None: + raise TypeError('Name defined twice') + name = decl + else: + split_char = decl[:1] == '/' and ';' or '/' + if split_char in decl: + first, second = decl.split(split_char, 1) + first = first.rstrip() + if first: + possible_names.append(split_opt(first)) + opts.append(first) + second = second.lstrip() + if second: + secondary_opts.append(second.lstrip()) + else: + possible_names.append(split_opt(decl)) + opts.append(decl) + + if name is None and possible_names: + possible_names.sort(key=lambda x: -len(x[0])) # group long options first + name = possible_names[0][1].replace('-', '_').lower() + if not isidentifier(name): + name = None + + if name is None: + if not expose_value: + return None, opts, secondary_opts + raise TypeError('Could not determine name for option') + + if not opts and not secondary_opts: + raise TypeError('No options defined but a name was passed (%s). ' + 'Did you mean to declare an argument instead ' + 'of an option?' 
% name) + + return name, opts, secondary_opts + + def add_to_parser(self, parser, ctx): + kwargs = { + 'dest': self.name, + 'nargs': self.nargs, + 'obj': self, + } + + if self.multiple: + action = 'append' + elif self.count: + action = 'count' + else: + action = 'store' + + if self.is_flag: + kwargs.pop('nargs', None) + if self.is_bool_flag and self.secondary_opts: + parser.add_option(self.opts, action=action + '_const', + const=True, **kwargs) + parser.add_option(self.secondary_opts, action=action + + '_const', const=False, **kwargs) + else: + parser.add_option(self.opts, action=action + '_const', + const=self.flag_value, + **kwargs) + else: + kwargs['action'] = action + parser.add_option(self.opts, **kwargs) + + def get_help_record(self, ctx): + if self.hidden: + return + any_prefix_is_slash = [] + + def _write_opts(opts): + rv, any_slashes = join_options(opts) + if any_slashes: + any_prefix_is_slash[:] = [True] + if not self.is_flag and not self.count: + rv += ' ' + self.make_metavar() + return rv + + rv = [_write_opts(self.opts)] + if self.secondary_opts: + rv.append(_write_opts(self.secondary_opts)) + + help = self.help or '' + extra = [] + if self.show_envvar: + envvar = self.envvar + if envvar is None: + if self.allow_from_autoenv and \ + ctx.auto_envvar_prefix is not None: + envvar = '%s_%s' % (ctx.auto_envvar_prefix, self.name.upper()) + if envvar is not None: + extra.append('env var: %s' % ( + ', '.join('%s' % d for d in envvar) + if isinstance(envvar, (list, tuple)) + else envvar, )) + if self.default is not None and self.show_default: + if isinstance(self.show_default, string_types): + default_string = '({})'.format(self.show_default) + elif isinstance(self.default, (list, tuple)): + default_string = ', '.join('%s' % d for d in self.default) + elif inspect.isfunction(self.default): + default_string = "(dynamic)" + else: + default_string = self.default + extra.append('default: {}'.format(default_string)) + + if self.required: + extra.append('required') + if extra: + help = '%s[%s]' % (help and help + ' ' or '', '; '.join(extra)) + + return ((any_prefix_is_slash and '; ' or ' / ').join(rv), help) + + def get_default(self, ctx): + # If we're a non boolean flag out default is more complex because + # we need to look at all flags in the same group to figure out + # if we're the the default one in which case we return the flag + # value as default. + if self.is_flag and not self.is_bool_flag: + for param in ctx.command.params: + if param.name == self.name and param.default: + return param.flag_value + return None + return Parameter.get_default(self, ctx) + + def prompt_for_value(self, ctx): + """This is an alternative flow that can be activated in the full + value processing if a value does not exist. It will prompt the + user until a valid value exists and then returns the processed + value as result. + """ + # Calculate the default before prompting anything to be stable. + default = self.get_default(ctx) + + # If this is a prompt for a flag we need to handle this + # differently. 
+ if self.is_bool_flag: + return confirm(self.prompt, default) + + return prompt(self.prompt, default=default, type=self.type, + hide_input=self.hide_input, show_choices=self.show_choices, + confirmation_prompt=self.confirmation_prompt, + value_proc=lambda x: self.process_value(ctx, x)) + + def resolve_envvar_value(self, ctx): + rv = Parameter.resolve_envvar_value(self, ctx) + if rv is not None: + return rv + if self.allow_from_autoenv and \ + ctx.auto_envvar_prefix is not None: + envvar = '%s_%s' % (ctx.auto_envvar_prefix, self.name.upper()) + return os.environ.get(envvar) + + def value_from_envvar(self, ctx): + rv = self.resolve_envvar_value(ctx) + if rv is None: + return None + value_depth = (self.nargs != 1) + bool(self.multiple) + if value_depth > 0 and rv is not None: + rv = self.type.split_envvar_value(rv) + if self.multiple and self.nargs != 1: + rv = batch(rv, self.nargs) + return rv + + def full_process_value(self, ctx, value): + if value is None and self.prompt is not None \ + and not ctx.resilient_parsing: + return self.prompt_for_value(ctx) + return Parameter.full_process_value(self, ctx, value) + + +class Argument(Parameter): + """Arguments are positional parameters to a command. They generally + provide fewer features than options but can have infinite ``nargs`` + and are required by default. + + All parameters are passed onwards to the parameter constructor. + """ + param_type_name = 'argument' + + def __init__(self, param_decls, required=None, **attrs): + if required is None: + if attrs.get('default') is not None: + required = False + else: + required = attrs.get('nargs', 1) > 0 + Parameter.__init__(self, param_decls, required=required, **attrs) + if self.default is not None and self.nargs < 0: + raise TypeError('nargs=-1 in combination with a default value ' + 'is not supported.') + + @property + def human_readable_name(self): + if self.metavar is not None: + return self.metavar + return self.name.upper() + + def make_metavar(self): + if self.metavar is not None: + return self.metavar + var = self.type.get_metavar(self) + if not var: + var = self.name.upper() + if not self.required: + var = '[%s]' % var + if self.nargs != 1: + var += '...' + return var + + def _parse_decls(self, decls, expose_value): + if not decls: + if not expose_value: + return None, [], [] + raise TypeError('Could not determine name for argument') + if len(decls) == 1: + name = arg = decls[0] + name = name.replace('-', '_').lower() + else: + raise TypeError('Arguments take exactly one ' + 'parameter declaration, got %d' % len(decls)) + return name, [arg], [] + + def get_usage_pieces(self, ctx): + return [self.make_metavar()] + + def get_error_hint(self, ctx): + return '"%s"' % self.make_metavar() + + def add_to_parser(self, parser, ctx): + parser.add_argument(dest=self.name, nargs=self.nargs, + obj=self) + + +# Circular dependency between decorators and core +from .decorators import command, group diff --git a/flask/venv/lib/python3.6/site-packages/click/decorators.py b/flask/venv/lib/python3.6/site-packages/click/decorators.py new file mode 100644 index 0000000..c57c530 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/click/decorators.py @@ -0,0 +1,311 @@ +import sys +import inspect + +from functools import update_wrapper + +from ._compat import iteritems +from ._unicodefun import _check_for_unicode_literals +from .utils import echo +from .globals import get_current_context + + +def pass_context(f): + """Marks a callback as wanting to receive the current context + object as first argument. 
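For example, pass_context is commonly placed on a group callback to stash shared state that nested commands read back (a sketch; the shape of `ctx.obj` here is an assumption, not mandated by Click):

    import click

    @click.group()
    @click.option('--verbose', is_flag=True)
    @click.pass_context
    def cli(ctx, verbose):
        # ctx is the click.Context injected as the first argument.
        ctx.obj = {'verbose': verbose}

    @cli.command()
    @click.pass_context
    def status(ctx):
        if ctx.obj['verbose']:
            click.echo('running in verbose mode')
        click.echo('ok')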
+ """ + def new_func(*args, **kwargs): + return f(get_current_context(), *args, **kwargs) + return update_wrapper(new_func, f) + + +def pass_obj(f): + """Similar to :func:`pass_context`, but only pass the object on the + context onwards (:attr:`Context.obj`). This is useful if that object + represents the state of a nested system. + """ + def new_func(*args, **kwargs): + return f(get_current_context().obj, *args, **kwargs) + return update_wrapper(new_func, f) + + +def make_pass_decorator(object_type, ensure=False): + """Given an object type this creates a decorator that will work + similar to :func:`pass_obj` but instead of passing the object of the + current context, it will find the innermost context of type + :func:`object_type`. + + This generates a decorator that works roughly like this:: + + from functools import update_wrapper + + def decorator(f): + @pass_context + def new_func(ctx, *args, **kwargs): + obj = ctx.find_object(object_type) + return ctx.invoke(f, obj, *args, **kwargs) + return update_wrapper(new_func, f) + return decorator + + :param object_type: the type of the object to pass. + :param ensure: if set to `True`, a new object will be created and + remembered on the context if it's not there yet. + """ + def decorator(f): + def new_func(*args, **kwargs): + ctx = get_current_context() + if ensure: + obj = ctx.ensure_object(object_type) + else: + obj = ctx.find_object(object_type) + if obj is None: + raise RuntimeError('Managed to invoke callback without a ' + 'context object of type %r existing' + % object_type.__name__) + return ctx.invoke(f, obj, *args, **kwargs) + return update_wrapper(new_func, f) + return decorator + + +def _make_command(f, name, attrs, cls): + if isinstance(f, Command): + raise TypeError('Attempted to convert a callback into a ' + 'command twice.') + try: + params = f.__click_params__ + params.reverse() + del f.__click_params__ + except AttributeError: + params = [] + help = attrs.get('help') + if help is None: + help = inspect.getdoc(f) + if isinstance(help, bytes): + help = help.decode('utf-8') + else: + help = inspect.cleandoc(help) + attrs['help'] = help + _check_for_unicode_literals() + return cls(name=name or f.__name__.lower().replace('_', '-'), + callback=f, params=params, **attrs) + + +def command(name=None, cls=None, **attrs): + r"""Creates a new :class:`Command` and uses the decorated function as + callback. This will also automatically attach all decorated + :func:`option`\s and :func:`argument`\s as parameters to the command. + + The name of the command defaults to the name of the function. If you + want to change that, you can pass the intended name as the first + argument. + + All keyword arguments are forwarded to the underlying command class. + + Once decorated the function turns into a :class:`Command` instance + that can be invoked as a command line utility or be attached to a + command :class:`Group`. + + :param name: the name of the command. This defaults to the function + name with underscores replaced by dashes. + :param cls: the command class to instantiate. This defaults to + :class:`Command`. + """ + if cls is None: + cls = Command + def decorator(f): + cmd = _make_command(f, name, attrs, cls) + cmd.__doc__ = f.__doc__ + return cmd + return decorator + + +def group(name=None, **attrs): + """Creates a new :class:`Group` with a function as callback. This + works otherwise the same as :func:`command` just that the `cls` + parameter is set to :class:`Group`. 
+ """ + attrs.setdefault('cls', Group) + return command(name, **attrs) + + +def _param_memo(f, param): + if isinstance(f, Command): + f.params.append(param) + else: + if not hasattr(f, '__click_params__'): + f.__click_params__ = [] + f.__click_params__.append(param) + + +def argument(*param_decls, **attrs): + """Attaches an argument to the command. All positional arguments are + passed as parameter declarations to :class:`Argument`; all keyword + arguments are forwarded unchanged (except ``cls``). + This is equivalent to creating an :class:`Argument` instance manually + and attaching it to the :attr:`Command.params` list. + + :param cls: the argument class to instantiate. This defaults to + :class:`Argument`. + """ + def decorator(f): + ArgumentClass = attrs.pop('cls', Argument) + _param_memo(f, ArgumentClass(param_decls, **attrs)) + return f + return decorator + + +def option(*param_decls, **attrs): + """Attaches an option to the command. All positional arguments are + passed as parameter declarations to :class:`Option`; all keyword + arguments are forwarded unchanged (except ``cls``). + This is equivalent to creating an :class:`Option` instance manually + and attaching it to the :attr:`Command.params` list. + + :param cls: the option class to instantiate. This defaults to + :class:`Option`. + """ + def decorator(f): + # Issue 926, copy attrs, so pre-defined options can re-use the same cls= + option_attrs = attrs.copy() + + if 'help' in option_attrs: + option_attrs['help'] = inspect.cleandoc(option_attrs['help']) + OptionClass = option_attrs.pop('cls', Option) + _param_memo(f, OptionClass(param_decls, **option_attrs)) + return f + return decorator + + +def confirmation_option(*param_decls, **attrs): + """Shortcut for confirmation prompts that can be ignored by passing + ``--yes`` as parameter. + + This is equivalent to decorating a function with :func:`option` with + the following parameters:: + + def callback(ctx, param, value): + if not value: + ctx.abort() + + @click.command() + @click.option('--yes', is_flag=True, callback=callback, + expose_value=False, prompt='Do you want to continue?') + def dropdb(): + pass + """ + def decorator(f): + def callback(ctx, param, value): + if not value: + ctx.abort() + attrs.setdefault('is_flag', True) + attrs.setdefault('callback', callback) + attrs.setdefault('expose_value', False) + attrs.setdefault('prompt', 'Do you want to continue?') + attrs.setdefault('help', 'Confirm the action without prompting.') + return option(*(param_decls or ('--yes',)), **attrs)(f) + return decorator + + +def password_option(*param_decls, **attrs): + """Shortcut for password prompts. + + This is equivalent to decorating a function with :func:`option` with + the following parameters:: + + @click.command() + @click.option('--password', prompt=True, confirmation_prompt=True, + hide_input=True) + def changeadmin(password): + pass + """ + def decorator(f): + attrs.setdefault('prompt', True) + attrs.setdefault('confirmation_prompt', True) + attrs.setdefault('hide_input', True) + return option(*(param_decls or ('--password',)), **attrs)(f) + return decorator + + +def version_option(version=None, *param_decls, **attrs): + """Adds a ``--version`` option which immediately ends the program + printing out the version number. This is implemented as an eager + option that prints the version and exits the program in the callback. + + :param version: the version number to show. If not provided Click + attempts an auto discovery via setuptools. 
+ :param prog_name: the name of the program (defaults to autodetection) + :param message: custom message to show instead of the default + (``'%(prog)s, version %(version)s'``) + :param others: everything else is forwarded to :func:`option`. + """ + if version is None: + if hasattr(sys, '_getframe'): + module = sys._getframe(1).f_globals.get('__name__') + else: + module = '' + + def decorator(f): + prog_name = attrs.pop('prog_name', None) + message = attrs.pop('message', '%(prog)s, version %(version)s') + + def callback(ctx, param, value): + if not value or ctx.resilient_parsing: + return + prog = prog_name + if prog is None: + prog = ctx.find_root().info_name + ver = version + if ver is None: + try: + import pkg_resources + except ImportError: + pass + else: + for dist in pkg_resources.working_set: + scripts = dist.get_entry_map().get('console_scripts') or {} + for script_name, entry_point in iteritems(scripts): + if entry_point.module_name == module: + ver = dist.version + break + if ver is None: + raise RuntimeError('Could not determine version') + echo(message % { + 'prog': prog, + 'version': ver, + }, color=ctx.color) + ctx.exit() + + attrs.setdefault('is_flag', True) + attrs.setdefault('expose_value', False) + attrs.setdefault('is_eager', True) + attrs.setdefault('help', 'Show the version and exit.') + attrs['callback'] = callback + return option(*(param_decls or ('--version',)), **attrs)(f) + return decorator + + +def help_option(*param_decls, **attrs): + """Adds a ``--help`` option which immediately ends the program + printing out the help page. This is usually unnecessary to add as + this is added by default to all commands unless suppressed. + + Like :func:`version_option`, this is implemented as eager option that + prints in the callback and exits. + + All arguments are forwarded to :func:`option`. 
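A sketch of version_option as described above, passing an explicit version rather than relying on setuptools auto discovery (the version string and program name are placeholders):

    import click

    @click.command()
    @click.version_option('0.1.0', '--version', prog_name='demo-cli',
                          message='%(prog)s, version %(version)s')
    def cli():
        click.echo('running')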
+ """ + def decorator(f): + def callback(ctx, param, value): + if value and not ctx.resilient_parsing: + echo(ctx.get_help(), color=ctx.color) + ctx.exit() + attrs.setdefault('is_flag', True) + attrs.setdefault('expose_value', False) + attrs.setdefault('help', 'Show this message and exit.') + attrs.setdefault('is_eager', True) + attrs['callback'] = callback + return option(*(param_decls or ('--help',)), **attrs)(f) + return decorator + + +# Circular dependencies between core and decorators +from .core import Command, Group, Argument, Option diff --git a/flask/venv/lib/python3.6/site-packages/click/exceptions.py b/flask/venv/lib/python3.6/site-packages/click/exceptions.py new file mode 100644 index 0000000..6fa1765 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/click/exceptions.py @@ -0,0 +1,235 @@ +from ._compat import PY2, filename_to_ui, get_text_stderr +from .utils import echo + + +def _join_param_hints(param_hint): + if isinstance(param_hint, (tuple, list)): + return ' / '.join('"%s"' % x for x in param_hint) + return param_hint + + +class ClickException(Exception): + """An exception that Click can handle and show to the user.""" + + #: The exit code for this exception + exit_code = 1 + + def __init__(self, message): + ctor_msg = message + if PY2: + if ctor_msg is not None: + ctor_msg = ctor_msg.encode('utf-8') + Exception.__init__(self, ctor_msg) + self.message = message + + def format_message(self): + return self.message + + def __str__(self): + return self.message + + if PY2: + __unicode__ = __str__ + + def __str__(self): + return self.message.encode('utf-8') + + def show(self, file=None): + if file is None: + file = get_text_stderr() + echo('Error: %s' % self.format_message(), file=file) + + +class UsageError(ClickException): + """An internal exception that signals a usage error. This typically + aborts any further handling. + + :param message: the error message to display. + :param ctx: optionally the context that caused this error. Click will + fill in the context automatically in some situations. + """ + exit_code = 2 + + def __init__(self, message, ctx=None): + ClickException.__init__(self, message) + self.ctx = ctx + self.cmd = self.ctx and self.ctx.command or None + + def show(self, file=None): + if file is None: + file = get_text_stderr() + color = None + hint = '' + if (self.cmd is not None and + self.cmd.get_help_option(self.ctx) is not None): + hint = ('Try "%s %s" for help.\n' + % (self.ctx.command_path, self.ctx.help_option_names[0])) + if self.ctx is not None: + color = self.ctx.color + echo(self.ctx.get_usage() + '\n%s' % hint, file=file, color=color) + echo('Error: %s' % self.format_message(), file=file, color=color) + + +class BadParameter(UsageError): + """An exception that formats out a standardized error message for a + bad parameter. This is useful when thrown from a callback or type as + Click will attach contextual information to it (for instance, which + parameter it is). + + .. versionadded:: 2.0 + + :param param: the parameter object that caused this error. This can + be left out, and Click will attach this info itself + if possible. + :param param_hint: a string that shows up as parameter name. This + can be used as alternative to `param` in cases + where custom validation should happen. If it is + a string it's used as such, if it's a list then + each item is quoted and separated. 
+ """ + + def __init__(self, message, ctx=None, param=None, + param_hint=None): + UsageError.__init__(self, message, ctx) + self.param = param + self.param_hint = param_hint + + def format_message(self): + if self.param_hint is not None: + param_hint = self.param_hint + elif self.param is not None: + param_hint = self.param.get_error_hint(self.ctx) + else: + return 'Invalid value: %s' % self.message + param_hint = _join_param_hints(param_hint) + + return 'Invalid value for %s: %s' % (param_hint, self.message) + + +class MissingParameter(BadParameter): + """Raised if click required an option or argument but it was not + provided when invoking the script. + + .. versionadded:: 4.0 + + :param param_type: a string that indicates the type of the parameter. + The default is to inherit the parameter type from + the given `param`. Valid values are ``'parameter'``, + ``'option'`` or ``'argument'``. + """ + + def __init__(self, message=None, ctx=None, param=None, + param_hint=None, param_type=None): + BadParameter.__init__(self, message, ctx, param, param_hint) + self.param_type = param_type + + def format_message(self): + if self.param_hint is not None: + param_hint = self.param_hint + elif self.param is not None: + param_hint = self.param.get_error_hint(self.ctx) + else: + param_hint = None + param_hint = _join_param_hints(param_hint) + + param_type = self.param_type + if param_type is None and self.param is not None: + param_type = self.param.param_type_name + + msg = self.message + if self.param is not None: + msg_extra = self.param.type.get_missing_message(self.param) + if msg_extra: + if msg: + msg += '. ' + msg_extra + else: + msg = msg_extra + + return 'Missing %s%s%s%s' % ( + param_type, + param_hint and ' %s' % param_hint or '', + msg and '. ' or '.', + msg or '', + ) + + +class NoSuchOption(UsageError): + """Raised if click attempted to handle an option that does not + exist. + + .. versionadded:: 4.0 + """ + + def __init__(self, option_name, message=None, possibilities=None, + ctx=None): + if message is None: + message = 'no such option: %s' % option_name + UsageError.__init__(self, message, ctx) + self.option_name = option_name + self.possibilities = possibilities + + def format_message(self): + bits = [self.message] + if self.possibilities: + if len(self.possibilities) == 1: + bits.append('Did you mean %s?' % self.possibilities[0]) + else: + possibilities = sorted(self.possibilities) + bits.append('(Possible options: %s)' % ', '.join(possibilities)) + return ' '.join(bits) + + +class BadOptionUsage(UsageError): + """Raised if an option is generally supplied but the use of the option + was incorrect. This is for instance raised if the number of arguments + for an option is not correct. + + .. versionadded:: 4.0 + + :param option_name: the name of the option being used incorrectly. + """ + + def __init__(self, option_name, message, ctx=None): + UsageError.__init__(self, message, ctx) + self.option_name = option_name + + +class BadArgumentUsage(UsageError): + """Raised if an argument is generally supplied but the use of the argument + was incorrect. This is for instance raised if the number of values + for an argument is not correct. + + .. 
versionadded:: 6.0 + """ + + def __init__(self, message, ctx=None): + UsageError.__init__(self, message, ctx) + + +class FileError(ClickException): + """Raised if a file cannot be opened.""" + + def __init__(self, filename, hint=None): + ui_filename = filename_to_ui(filename) + if hint is None: + hint = 'unknown error' + ClickException.__init__(self, hint) + self.ui_filename = ui_filename + self.filename = filename + + def format_message(self): + return 'Could not open file %s: %s' % (self.ui_filename, self.message) + + +class Abort(RuntimeError): + """An internal signalling exception that signals Click to abort.""" + + +class Exit(RuntimeError): + """An exception that indicates that the application should exit with some + status code. + + :param code: the status code to exit with. + """ + def __init__(self, code=0): + self.exit_code = code diff --git a/flask/venv/lib/python3.6/site-packages/click/formatting.py b/flask/venv/lib/python3.6/site-packages/click/formatting.py new file mode 100644 index 0000000..a3d6a4d --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/click/formatting.py @@ -0,0 +1,256 @@ +from contextlib import contextmanager +from .termui import get_terminal_size +from .parser import split_opt +from ._compat import term_len + + +# Can force a width. This is used by the test system +FORCED_WIDTH = None + + +def measure_table(rows): + widths = {} + for row in rows: + for idx, col in enumerate(row): + widths[idx] = max(widths.get(idx, 0), term_len(col)) + return tuple(y for x, y in sorted(widths.items())) + + +def iter_rows(rows, col_count): + for row in rows: + row = tuple(row) + yield row + ('',) * (col_count - len(row)) + + +def wrap_text(text, width=78, initial_indent='', subsequent_indent='', + preserve_paragraphs=False): + """A helper function that intelligently wraps text. By default, it + assumes that it operates on a single paragraph of text but if the + `preserve_paragraphs` parameter is provided it will intelligently + handle paragraphs (defined by two empty lines). + + If paragraphs are handled, a paragraph can be prefixed with an empty + line containing the ``\\b`` character (``\\x08``) to indicate that + no rewrapping should happen in that block. + + :param text: the text that should be rewrapped. + :param width: the maximum width for the text. + :param initial_indent: the initial indent that should be placed on the + first line as a string. + :param subsequent_indent: the indent string that should be placed on + each consecutive line. + :param preserve_paragraphs: if this flag is set then the wrapping will + intelligently handle paragraphs. 
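A quick sketch of calling wrap_text directly with the parameters documented above (the sample text is illustrative):

    from click.formatting import wrap_text

    text = ('Click rewraps long help paragraphs to the requested width so '
            'that option descriptions stay readable in narrow terminals.')
    print(wrap_text(text, width=40, initial_indent='  ',
                    subsequent_indent='  '))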
+ """ + from ._textwrap import TextWrapper + text = text.expandtabs() + wrapper = TextWrapper(width, initial_indent=initial_indent, + subsequent_indent=subsequent_indent, + replace_whitespace=False) + if not preserve_paragraphs: + return wrapper.fill(text) + + p = [] + buf = [] + indent = None + + def _flush_par(): + if not buf: + return + if buf[0].strip() == '\b': + p.append((indent or 0, True, '\n'.join(buf[1:]))) + else: + p.append((indent or 0, False, ' '.join(buf))) + del buf[:] + + for line in text.splitlines(): + if not line: + _flush_par() + indent = None + else: + if indent is None: + orig_len = term_len(line) + line = line.lstrip() + indent = orig_len - term_len(line) + buf.append(line) + _flush_par() + + rv = [] + for indent, raw, text in p: + with wrapper.extra_indent(' ' * indent): + if raw: + rv.append(wrapper.indent_only(text)) + else: + rv.append(wrapper.fill(text)) + + return '\n\n'.join(rv) + + +class HelpFormatter(object): + """This class helps with formatting text-based help pages. It's + usually just needed for very special internal cases, but it's also + exposed so that developers can write their own fancy outputs. + + At present, it always writes into memory. + + :param indent_increment: the additional increment for each level. + :param width: the width for the text. This defaults to the terminal + width clamped to a maximum of 78. + """ + + def __init__(self, indent_increment=2, width=None, max_width=None): + self.indent_increment = indent_increment + if max_width is None: + max_width = 80 + if width is None: + width = FORCED_WIDTH + if width is None: + width = max(min(get_terminal_size()[0], max_width) - 2, 50) + self.width = width + self.current_indent = 0 + self.buffer = [] + + def write(self, string): + """Writes a unicode string into the internal buffer.""" + self.buffer.append(string) + + def indent(self): + """Increases the indentation.""" + self.current_indent += self.indent_increment + + def dedent(self): + """Decreases the indentation.""" + self.current_indent -= self.indent_increment + + def write_usage(self, prog, args='', prefix='Usage: '): + """Writes a usage line into the buffer. + + :param prog: the program name. + :param args: whitespace separated list of arguments. + :param prefix: the prefix for the first line. + """ + usage_prefix = '%*s%s ' % (self.current_indent, prefix, prog) + text_width = self.width - self.current_indent + + if text_width >= (term_len(usage_prefix) + 20): + # The arguments will fit to the right of the prefix. + indent = ' ' * term_len(usage_prefix) + self.write(wrap_text(args, text_width, + initial_indent=usage_prefix, + subsequent_indent=indent)) + else: + # The prefix is too long, put the arguments on the next line. + self.write(usage_prefix) + self.write('\n') + indent = ' ' * (max(self.current_indent, term_len(prefix)) + 4) + self.write(wrap_text(args, text_width, + initial_indent=indent, + subsequent_indent=indent)) + + self.write('\n') + + def write_heading(self, heading): + """Writes a heading into the buffer.""" + self.write('%*s%s:\n' % (self.current_indent, '', heading)) + + def write_paragraph(self): + """Writes a paragraph into the buffer.""" + if self.buffer: + self.write('\n') + + def write_text(self, text): + """Writes re-indented text into the buffer. This rewraps and + preserves paragraphs. 
+ """ + text_width = max(self.width - self.current_indent, 11) + indent = ' ' * self.current_indent + self.write(wrap_text(text, text_width, + initial_indent=indent, + subsequent_indent=indent, + preserve_paragraphs=True)) + self.write('\n') + + def write_dl(self, rows, col_max=30, col_spacing=2): + """Writes a definition list into the buffer. This is how options + and commands are usually formatted. + + :param rows: a list of two item tuples for the terms and values. + :param col_max: the maximum width of the first column. + :param col_spacing: the number of spaces between the first and + second column. + """ + rows = list(rows) + widths = measure_table(rows) + if len(widths) != 2: + raise TypeError('Expected two columns for definition list') + + first_col = min(widths[0], col_max) + col_spacing + + for first, second in iter_rows(rows, len(widths)): + self.write('%*s%s' % (self.current_indent, '', first)) + if not second: + self.write('\n') + continue + if term_len(first) <= first_col - col_spacing: + self.write(' ' * (first_col - term_len(first))) + else: + self.write('\n') + self.write(' ' * (first_col + self.current_indent)) + + text_width = max(self.width - first_col - 2, 10) + lines = iter(wrap_text(second, text_width).splitlines()) + if lines: + self.write(next(lines) + '\n') + for line in lines: + self.write('%*s%s\n' % ( + first_col + self.current_indent, '', line)) + else: + self.write('\n') + + @contextmanager + def section(self, name): + """Helpful context manager that writes a paragraph, a heading, + and the indents. + + :param name: the section name that is written as heading. + """ + self.write_paragraph() + self.write_heading(name) + self.indent() + try: + yield + finally: + self.dedent() + + @contextmanager + def indentation(self): + """A context manager that increases the indentation.""" + self.indent() + try: + yield + finally: + self.dedent() + + def getvalue(self): + """Returns the buffer contents.""" + return ''.join(self.buffer) + + +def join_options(options): + """Given a list of option strings this joins them in the most appropriate + way and returns them in the form ``(formatted_string, + any_prefix_is_slash)`` where the second item in the tuple is a flag that + indicates if any of the option prefixes was a slash. + """ + rv = [] + any_prefix_is_slash = False + for opt in options: + prefix = split_opt(opt)[0] + if prefix == '/': + any_prefix_is_slash = True + rv.append((len(prefix), opt)) + + rv.sort(key=lambda x: x[0]) + + rv = ', '.join(x[1] for x in rv) + return rv, any_prefix_is_slash diff --git a/flask/venv/lib/python3.6/site-packages/click/globals.py b/flask/venv/lib/python3.6/site-packages/click/globals.py new file mode 100644 index 0000000..843b594 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/click/globals.py @@ -0,0 +1,48 @@ +from threading import local + + +_local = local() + + +def get_current_context(silent=False): + """Returns the current click context. This can be used as a way to + access the current context object from anywhere. This is a more implicit + alternative to the :func:`pass_context` decorator. This function is + primarily useful for helpers such as :func:`echo` which might be + interested in changing its behavior based on the current context. + + To push the current context, :meth:`Context.scope` can be used. + + .. versionadded:: 5.0 + + :param silent: is set to `True` the return value is `None` if no context + is available. The default behavior is to raise a + :exc:`RuntimeError`. 
+ """ + try: + return getattr(_local, 'stack')[-1] + except (AttributeError, IndexError): + if not silent: + raise RuntimeError('There is no active click context.') + + +def push_context(ctx): + """Pushes a new context to the current stack.""" + _local.__dict__.setdefault('stack', []).append(ctx) + + +def pop_context(): + """Removes the top level from the stack.""" + _local.stack.pop() + + +def resolve_color_default(color=None): + """"Internal helper to get the default value of the color flag. If a + value is passed it's returned unchanged, otherwise it's looked up from + the current context. + """ + if color is not None: + return color + ctx = get_current_context(silent=True) + if ctx is not None: + return ctx.color diff --git a/flask/venv/lib/python3.6/site-packages/click/parser.py b/flask/venv/lib/python3.6/site-packages/click/parser.py new file mode 100644 index 0000000..1c3ae9c --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/click/parser.py @@ -0,0 +1,427 @@ +# -*- coding: utf-8 -*- +""" +click.parser +~~~~~~~~~~~~ + +This module started out as largely a copy paste from the stdlib's +optparse module with the features removed that we do not need from +optparse because we implement them in Click on a higher level (for +instance type handling, help formatting and a lot more). + +The plan is to remove more and more from here over time. + +The reason this is a different module and not optparse from the stdlib +is that there are differences in 2.x and 3.x about the error messages +generated and optparse in the stdlib uses gettext for no good reason +and might cause us issues. +""" + +import re +from collections import deque +from .exceptions import UsageError, NoSuchOption, BadOptionUsage, \ + BadArgumentUsage + + +def _unpack_args(args, nargs_spec): + """Given an iterable of arguments and an iterable of nargs specifications, + it returns a tuple with all the unpacked arguments at the first index + and all remaining arguments as the second. + + The nargs specification is the number of arguments that should be consumed + or `-1` to indicate that this position should eat up all the remainders. + + Missing items are filled with `None`. + """ + args = deque(args) + nargs_spec = deque(nargs_spec) + rv = [] + spos = None + + def _fetch(c): + try: + if spos is None: + return c.popleft() + else: + return c.pop() + except IndexError: + return None + + while nargs_spec: + nargs = _fetch(nargs_spec) + if nargs == 1: + rv.append(_fetch(args)) + elif nargs > 1: + x = [_fetch(args) for _ in range(nargs)] + # If we're reversed, we're pulling in the arguments in reverse, + # so we need to turn them around. + if spos is not None: + x.reverse() + rv.append(tuple(x)) + elif nargs < 0: + if spos is not None: + raise TypeError('Cannot have two nargs < 0') + spos = len(rv) + rv.append(None) + + # spos is the position of the wildcard (star). If it's not `None`, + # we fill it with the remainder. 
+ if spos is not None: + rv[spos] = tuple(args) + args = [] + rv[spos + 1:] = reversed(rv[spos + 1:]) + + return tuple(rv), list(args) + + +def _error_opt_args(nargs, opt): + if nargs == 1: + raise BadOptionUsage(opt, '%s option requires an argument' % opt) + raise BadOptionUsage(opt, '%s option requires %d arguments' % (opt, nargs)) + + +def split_opt(opt): + first = opt[:1] + if first.isalnum(): + return '', opt + if opt[1:2] == first: + return opt[:2], opt[2:] + return first, opt[1:] + + +def normalize_opt(opt, ctx): + if ctx is None or ctx.token_normalize_func is None: + return opt + prefix, opt = split_opt(opt) + return prefix + ctx.token_normalize_func(opt) + + +def split_arg_string(string): + """Given an argument string this attempts to split it into small parts.""" + rv = [] + for match in re.finditer(r"('([^'\\]*(?:\\.[^'\\]*)*)'" + r'|"([^"\\]*(?:\\.[^"\\]*)*)"' + r'|\S+)\s*', string, re.S): + arg = match.group().strip() + if arg[:1] == arg[-1:] and arg[:1] in '"\'': + arg = arg[1:-1].encode('ascii', 'backslashreplace') \ + .decode('unicode-escape') + try: + arg = type(string)(arg) + except UnicodeError: + pass + rv.append(arg) + return rv + + +class Option(object): + + def __init__(self, opts, dest, action=None, nargs=1, const=None, obj=None): + self._short_opts = [] + self._long_opts = [] + self.prefixes = set() + + for opt in opts: + prefix, value = split_opt(opt) + if not prefix: + raise ValueError('Invalid start character for option (%s)' + % opt) + self.prefixes.add(prefix[0]) + if len(prefix) == 1 and len(value) == 1: + self._short_opts.append(opt) + else: + self._long_opts.append(opt) + self.prefixes.add(prefix) + + if action is None: + action = 'store' + + self.dest = dest + self.action = action + self.nargs = nargs + self.const = const + self.obj = obj + + @property + def takes_value(self): + return self.action in ('store', 'append') + + def process(self, value, state): + if self.action == 'store': + state.opts[self.dest] = value + elif self.action == 'store_const': + state.opts[self.dest] = self.const + elif self.action == 'append': + state.opts.setdefault(self.dest, []).append(value) + elif self.action == 'append_const': + state.opts.setdefault(self.dest, []).append(self.const) + elif self.action == 'count': + state.opts[self.dest] = state.opts.get(self.dest, 0) + 1 + else: + raise ValueError('unknown action %r' % self.action) + state.order.append(self.obj) + + +class Argument(object): + + def __init__(self, dest, nargs=1, obj=None): + self.dest = dest + self.nargs = nargs + self.obj = obj + + def process(self, value, state): + if self.nargs > 1: + holes = sum(1 for x in value if x is None) + if holes == len(value): + value = None + elif holes != 0: + raise BadArgumentUsage('argument %s takes %d values' + % (self.dest, self.nargs)) + state.opts[self.dest] = value + state.order.append(self.obj) + + +class ParsingState(object): + + def __init__(self, rargs): + self.opts = {} + self.largs = [] + self.rargs = rargs + self.order = [] + + +class OptionParser(object): + """The option parser is an internal class that is ultimately used to + parse options and arguments. It's modelled after optparse and brings + a similar but vastly simplified API. It should generally not be used + directly as the high level Click classes wrap it for you. + + It's not nearly as extensible as optparse or argparse as it does not + implement features that are implemented on a higher level (such as + types or defaults). 
+ + :param ctx: optionally the :class:`~click.Context` where this parser + should go with. + """ + + def __init__(self, ctx=None): + #: The :class:`~click.Context` for this parser. This might be + #: `None` for some advanced use cases. + self.ctx = ctx + #: This controls how the parser deals with interspersed arguments. + #: If this is set to `False`, the parser will stop on the first + #: non-option. Click uses this to implement nested subcommands + #: safely. + self.allow_interspersed_args = True + #: This tells the parser how to deal with unknown options. By + #: default it will error out (which is sensible), but there is a + #: second mode where it will ignore it and continue processing + #: after shifting all the unknown options into the resulting args. + self.ignore_unknown_options = False + if ctx is not None: + self.allow_interspersed_args = ctx.allow_interspersed_args + self.ignore_unknown_options = ctx.ignore_unknown_options + self._short_opt = {} + self._long_opt = {} + self._opt_prefixes = set(['-', '--']) + self._args = [] + + def add_option(self, opts, dest, action=None, nargs=1, const=None, + obj=None): + """Adds a new option named `dest` to the parser. The destination + is not inferred (unlike with optparse) and needs to be explicitly + provided. Action can be any of ``store``, ``store_const``, + ``append``, ``appnd_const`` or ``count``. + + The `obj` can be used to identify the option in the order list + that is returned from the parser. + """ + if obj is None: + obj = dest + opts = [normalize_opt(opt, self.ctx) for opt in opts] + option = Option(opts, dest, action=action, nargs=nargs, + const=const, obj=obj) + self._opt_prefixes.update(option.prefixes) + for opt in option._short_opts: + self._short_opt[opt] = option + for opt in option._long_opts: + self._long_opt[opt] = option + + def add_argument(self, dest, nargs=1, obj=None): + """Adds a positional argument named `dest` to the parser. + + The `obj` can be used to identify the option in the order list + that is returned from the parser. + """ + if obj is None: + obj = dest + self._args.append(Argument(dest=dest, nargs=nargs, obj=obj)) + + def parse_args(self, args): + """Parses positional arguments and returns ``(values, args, order)`` + for the parsed options and arguments as well as the leftover + arguments if there are any. The order is a list of objects as they + appear on the command line. If arguments appear multiple times they + will be memorized multiple times as well. + """ + state = ParsingState(args) + try: + self._process_args_for_options(state) + self._process_args_for_args(state) + except UsageError: + if self.ctx is None or not self.ctx.resilient_parsing: + raise + return state.opts, state.largs, state.order + + def _process_args_for_args(self, state): + pargs, args = _unpack_args(state.largs + state.rargs, + [x.nargs for x in self._args]) + + for idx, arg in enumerate(self._args): + arg.process(pargs[idx], state) + + state.largs = args + state.rargs = [] + + def _process_args_for_options(self, state): + while state.rargs: + arg = state.rargs.pop(0) + arglen = len(arg) + # Double dashes always handled explicitly regardless of what + # prefixes are valid. 
+ if arg == '--': + return + elif arg[:1] in self._opt_prefixes and arglen > 1: + self._process_opts(arg, state) + elif self.allow_interspersed_args: + state.largs.append(arg) + else: + state.rargs.insert(0, arg) + return + + # Say this is the original argument list: + # [arg0, arg1, ..., arg(i-1), arg(i), arg(i+1), ..., arg(N-1)] + # ^ + # (we are about to process arg(i)). + # + # Then rargs is [arg(i), ..., arg(N-1)] and largs is a *subset* of + # [arg0, ..., arg(i-1)] (any options and their arguments will have + # been removed from largs). + # + # The while loop will usually consume 1 or more arguments per pass. + # If it consumes 1 (eg. arg is an option that takes no arguments), + # then after _process_arg() is done the situation is: + # + # largs = subset of [arg0, ..., arg(i)] + # rargs = [arg(i+1), ..., arg(N-1)] + # + # If allow_interspersed_args is false, largs will always be + # *empty* -- still a subset of [arg0, ..., arg(i-1)], but + # not a very interesting subset! + + def _match_long_opt(self, opt, explicit_value, state): + if opt not in self._long_opt: + possibilities = [word for word in self._long_opt + if word.startswith(opt)] + raise NoSuchOption(opt, possibilities=possibilities, ctx=self.ctx) + + option = self._long_opt[opt] + if option.takes_value: + # At this point it's safe to modify rargs by injecting the + # explicit value, because no exception is raised in this + # branch. This means that the inserted value will be fully + # consumed. + if explicit_value is not None: + state.rargs.insert(0, explicit_value) + + nargs = option.nargs + if len(state.rargs) < nargs: + _error_opt_args(nargs, opt) + elif nargs == 1: + value = state.rargs.pop(0) + else: + value = tuple(state.rargs[:nargs]) + del state.rargs[:nargs] + + elif explicit_value is not None: + raise BadOptionUsage(opt, '%s option does not take a value' % opt) + + else: + value = None + + option.process(value, state) + + def _match_short_opt(self, arg, state): + stop = False + i = 1 + prefix = arg[0] + unknown_options = [] + + for ch in arg[1:]: + opt = normalize_opt(prefix + ch, self.ctx) + option = self._short_opt.get(opt) + i += 1 + + if not option: + if self.ignore_unknown_options: + unknown_options.append(ch) + continue + raise NoSuchOption(opt, ctx=self.ctx) + if option.takes_value: + # Any characters left in arg? Pretend they're the + # next arg, and stop consuming characters of arg. + if i < len(arg): + state.rargs.insert(0, arg[i:]) + stop = True + + nargs = option.nargs + if len(state.rargs) < nargs: + _error_opt_args(nargs, opt) + elif nargs == 1: + value = state.rargs.pop(0) + else: + value = tuple(state.rargs[:nargs]) + del state.rargs[:nargs] + + else: + value = None + + option.process(value, state) + + if stop: + break + + # If we got any unknown options we re-combinate the string of the + # remaining options and re-attach the prefix, then report that + # to the state as new larg. This way there is basic combinatorics + # that can be achieved while still ignoring unknown arguments. + if self.ignore_unknown_options and unknown_options: + state.largs.append(prefix + ''.join(unknown_options)) + + def _process_opts(self, arg, state): + explicit_value = None + # Long option handling happens in two parts. The first part is + # supporting explicitly attached values. In any case, we will try + # to long match the option first. 
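The parser is an internal class, but a short sketch of the low-level API described in its docstring might look like this:

from click.parser import OptionParser

parser = OptionParser()
parser.add_option(['-n', '--name'], dest='name')
parser.add_option(['-v', '--verbose'], dest='verbose', action='count')
parser.add_argument('src', nargs=1)

# Options are matched first, then positional arguments are unpacked.
opts, leftovers, order = parser.parse_args(['-vv', '--name', 'demo', 'input.txt'])
# opts -> {'verbose': 2, 'name': 'demo', 'src': 'input.txt'}; leftovers -> []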
+ if '=' in arg: + long_opt, explicit_value = arg.split('=', 1) + else: + long_opt = arg + norm_long_opt = normalize_opt(long_opt, self.ctx) + + # At this point we will match the (assumed) long option through + # the long option matching code. Note that this allows options + # like "-foo" to be matched as long options. + try: + self._match_long_opt(norm_long_opt, explicit_value, state) + except NoSuchOption: + # At this point the long option matching failed, and we need + # to try with short options. However there is a special rule + # which says, that if we have a two character options prefix + # (applies to "--foo" for instance), we do not dispatch to the + # short option code and will instead raise the no option + # error. + if arg[:2] not in self._opt_prefixes: + return self._match_short_opt(arg, state) + if not self.ignore_unknown_options: + raise + state.largs.append(arg) diff --git a/flask/venv/lib/python3.6/site-packages/click/termui.py b/flask/venv/lib/python3.6/site-packages/click/termui.py new file mode 100644 index 0000000..bf9a3aa --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/click/termui.py @@ -0,0 +1,606 @@ +import os +import sys +import struct +import inspect +import itertools + +from ._compat import raw_input, text_type, string_types, \ + isatty, strip_ansi, get_winterm_size, DEFAULT_COLUMNS, WIN +from .utils import echo +from .exceptions import Abort, UsageError +from .types import convert_type, Choice, Path +from .globals import resolve_color_default + + +# The prompt functions to use. The doc tools currently override these +# functions to customize how they work. +visible_prompt_func = raw_input + +_ansi_colors = { + 'black': 30, + 'red': 31, + 'green': 32, + 'yellow': 33, + 'blue': 34, + 'magenta': 35, + 'cyan': 36, + 'white': 37, + 'reset': 39, + 'bright_black': 90, + 'bright_red': 91, + 'bright_green': 92, + 'bright_yellow': 93, + 'bright_blue': 94, + 'bright_magenta': 95, + 'bright_cyan': 96, + 'bright_white': 97, +} +_ansi_reset_all = '\033[0m' + + +def hidden_prompt_func(prompt): + import getpass + return getpass.getpass(prompt) + + +def _build_prompt(text, suffix, show_default=False, default=None, show_choices=True, type=None): + prompt = text + if type is not None and show_choices and isinstance(type, Choice): + prompt += ' (' + ", ".join(map(str, type.choices)) + ')' + if default is not None and show_default: + prompt = '%s [%s]' % (prompt, default) + return prompt + suffix + + +def prompt(text, default=None, hide_input=False, confirmation_prompt=False, + type=None, value_proc=None, prompt_suffix=': ', show_default=True, + err=False, show_choices=True): + """Prompts a user for input. This is a convenience function that can + be used to prompt a user for input later. + + If the user aborts the input by sending a interrupt signal, this + function will catch it and raise a :exc:`Abort` exception. + + .. versionadded:: 7.0 + Added the show_choices parameter. + + .. versionadded:: 6.0 + Added unicode support for cmd.exe on Windows. + + .. versionadded:: 4.0 + Added the `err` parameter. + + :param text: the text to show for the prompt. + :param default: the default value to use if no input happens. If this + is not given it will prompt until it's aborted. + :param hide_input: if this is set to true then the input value will + be hidden. + :param confirmation_prompt: asks for confirmation for the value. + :param type: the type to use to check the value against. 
+ :param value_proc: if this parameter is provided it's a function that + is invoked instead of the type conversion to + convert a value. + :param prompt_suffix: a suffix that should be added to the prompt. + :param show_default: shows or hides the default value in the prompt. + :param err: if set to true the file defaults to ``stderr`` instead of + ``stdout``, the same as with echo. + :param show_choices: Show or hide choices if the passed type is a Choice. + For example if type is a Choice of either day or week, + show_choices is true and text is "Group by" then the + prompt will be "Group by (day, week): ". + """ + result = None + + def prompt_func(text): + f = hide_input and hidden_prompt_func or visible_prompt_func + try: + # Write the prompt separately so that we get nice + # coloring through colorama on Windows + echo(text, nl=False, err=err) + return f('') + except (KeyboardInterrupt, EOFError): + # getpass doesn't print a newline if the user aborts input with ^C. + # Allegedly this behavior is inherited from getpass(3). + # A doc bug has been filed at https://bugs.python.org/issue24711 + if hide_input: + echo(None, err=err) + raise Abort() + + if value_proc is None: + value_proc = convert_type(type, default) + + prompt = _build_prompt(text, prompt_suffix, show_default, default, show_choices, type) + + while 1: + while 1: + value = prompt_func(prompt) + if value: + break + elif default is not None: + if isinstance(value_proc, Path): + # validate Path default value(exists, dir_okay etc.) + value = default + break + return default + try: + result = value_proc(value) + except UsageError as e: + echo('Error: %s' % e.message, err=err) + continue + if not confirmation_prompt: + return result + while 1: + value2 = prompt_func('Repeat for confirmation: ') + if value2: + break + if value == value2: + return result + echo('Error: the two entered values do not match', err=err) + + +def confirm(text, default=False, abort=False, prompt_suffix=': ', + show_default=True, err=False): + """Prompts for confirmation (yes/no question). + + If the user aborts the input by sending a interrupt signal this + function will catch it and raise a :exc:`Abort` exception. + + .. versionadded:: 4.0 + Added the `err` parameter. + + :param text: the question to ask. + :param default: the default for the prompt. + :param abort: if this is set to `True` a negative answer aborts the + exception by raising :exc:`Abort`. + :param prompt_suffix: a suffix that should be added to the prompt. + :param show_default: shows or hides the default value in the prompt. + :param err: if set to true the file defaults to ``stderr`` instead of + ``stdout``, the same as with echo. + """ + prompt = _build_prompt(text, prompt_suffix, show_default, + default and 'Y/n' or 'y/N') + while 1: + try: + # Write the prompt separately so that we get nice + # coloring through colorama on Windows + echo(prompt, nl=False, err=err) + value = visible_prompt_func('').lower().strip() + except (KeyboardInterrupt, EOFError): + raise Abort() + if value in ('y', 'yes'): + rv = True + elif value in ('n', 'no'): + rv = False + elif value == '': + rv = default + else: + echo('Error: invalid input', err=err) + continue + break + if abort and not rv: + raise Abort() + return rv + + +def get_terminal_size(): + """Returns the current size of the terminal as tuple in the form + ``(width, height)`` in columns and rows. 
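A minimal sketch of these prompting helpers in use, assuming the package-level click.prompt and click.confirm re-exports from upstream Click:

import click

@click.command()
def setup():
    # Typed prompt with a default; invalid input re-prompts via the int type.
    port = click.prompt('Port', default=8080, type=int)
    # Yes/no question; returns a bool, default shown as [Y/n].
    if click.confirm('Start the server now?', default=True):
        click.echo('Starting on port %d' % port)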
+ """ + # If shutil has get_terminal_size() (Python 3.3 and later) use that + if sys.version_info >= (3, 3): + import shutil + shutil_get_terminal_size = getattr(shutil, 'get_terminal_size', None) + if shutil_get_terminal_size: + sz = shutil_get_terminal_size() + return sz.columns, sz.lines + + # We provide a sensible default for get_winterm_size() when being invoked + # inside a subprocess. Without this, it would not provide a useful input. + if get_winterm_size is not None: + size = get_winterm_size() + if size == (0, 0): + return (79, 24) + else: + return size + + def ioctl_gwinsz(fd): + try: + import fcntl + import termios + cr = struct.unpack( + 'hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234')) + except Exception: + return + return cr + + cr = ioctl_gwinsz(0) or ioctl_gwinsz(1) or ioctl_gwinsz(2) + if not cr: + try: + fd = os.open(os.ctermid(), os.O_RDONLY) + try: + cr = ioctl_gwinsz(fd) + finally: + os.close(fd) + except Exception: + pass + if not cr or not cr[0] or not cr[1]: + cr = (os.environ.get('LINES', 25), + os.environ.get('COLUMNS', DEFAULT_COLUMNS)) + return int(cr[1]), int(cr[0]) + + +def echo_via_pager(text_or_generator, color=None): + """This function takes a text and shows it via an environment specific + pager on stdout. + + .. versionchanged:: 3.0 + Added the `color` flag. + + :param text_or_generator: the text to page, or alternatively, a + generator emitting the text to page. + :param color: controls if the pager supports ANSI colors or not. The + default is autodetection. + """ + color = resolve_color_default(color) + + if inspect.isgeneratorfunction(text_or_generator): + i = text_or_generator() + elif isinstance(text_or_generator, string_types): + i = [text_or_generator] + else: + i = iter(text_or_generator) + + # convert every element of i to a text type if necessary + text_generator = (el if isinstance(el, string_types) else text_type(el) + for el in i) + + from ._termui_impl import pager + return pager(itertools.chain(text_generator, "\n"), color) + + +def progressbar(iterable=None, length=None, label=None, show_eta=True, + show_percent=None, show_pos=False, + item_show_func=None, fill_char='#', empty_char='-', + bar_template='%(label)s [%(bar)s] %(info)s', + info_sep=' ', width=36, file=None, color=None): + """This function creates an iterable context manager that can be used + to iterate over something while showing a progress bar. It will + either iterate over the `iterable` or `length` items (that are counted + up). While iteration happens, this function will print a rendered + progress bar to the given `file` (defaults to stdout) and will attempt + to calculate remaining time and more. By default, this progress bar + will not be rendered if the file is not a terminal. + + The context manager creates the progress bar. When the context + manager is entered the progress bar is already displayed. With every + iteration over the progress bar, the iterable passed to the bar is + advanced and the bar is updated. When the context manager exits, + a newline is printed and the progress bar is finalized on screen. + + No printing must happen or the progress bar will be unintentionally + destroyed. + + Example usage:: + + with progressbar(items) as bar: + for item in bar: + do_something_with(item) + + Alternatively, if no iterable is specified, one can manually update the + progress bar through the `update()` method instead of directly + iterating over the progress bar. 
The update method accepts the number + of steps to increment the bar with:: + + with progressbar(length=chunks.total_bytes) as bar: + for chunk in chunks: + process_chunk(chunk) + bar.update(chunks.bytes) + + .. versionadded:: 2.0 + + .. versionadded:: 4.0 + Added the `color` parameter. Added a `update` method to the + progressbar object. + + :param iterable: an iterable to iterate over. If not provided the length + is required. + :param length: the number of items to iterate over. By default the + progressbar will attempt to ask the iterator about its + length, which might or might not work. If an iterable is + also provided this parameter can be used to override the + length. If an iterable is not provided the progress bar + will iterate over a range of that length. + :param label: the label to show next to the progress bar. + :param show_eta: enables or disables the estimated time display. This is + automatically disabled if the length cannot be + determined. + :param show_percent: enables or disables the percentage display. The + default is `True` if the iterable has a length or + `False` if not. + :param show_pos: enables or disables the absolute position display. The + default is `False`. + :param item_show_func: a function called with the current item which + can return a string to show the current item + next to the progress bar. Note that the current + item can be `None`! + :param fill_char: the character to use to show the filled part of the + progress bar. + :param empty_char: the character to use to show the non-filled part of + the progress bar. + :param bar_template: the format string to use as template for the bar. + The parameters in it are ``label`` for the label, + ``bar`` for the progress bar and ``info`` for the + info section. + :param info_sep: the separator between multiple info items (eta etc.) + :param width: the width of the progress bar in characters, 0 means full + terminal width + :param file: the file to write to. If this is not a terminal then + only the label is printed. + :param color: controls if the terminal supports ANSI colors or not. The + default is autodetection. This is only needed if ANSI + codes are included anywhere in the progress bar output + which is not the case by default. + """ + from ._termui_impl import ProgressBar + color = resolve_color_default(color) + return ProgressBar(iterable=iterable, length=length, show_eta=show_eta, + show_percent=show_percent, show_pos=show_pos, + item_show_func=item_show_func, fill_char=fill_char, + empty_char=empty_char, bar_template=bar_template, + info_sep=info_sep, file=file, label=label, + width=width, color=color) + + +def clear(): + """Clears the terminal screen. This will have the effect of clearing + the whole visible space of the terminal and moving the cursor to the + top left. This does not do anything if not connected to a terminal. + + .. versionadded:: 2.0 + """ + if not isatty(sys.stdout): + return + # If we're on Windows and we don't have colorama available, then we + # clear the screen by shelling out. Otherwise we can use an escape + # sequence. + if WIN: + os.system('cls') + else: + sys.stdout.write('\033[2J\033[1;1H') + + +def style(text, fg=None, bg=None, bold=None, dim=None, underline=None, + blink=None, reverse=None, reset=True): + """Styles a text with ANSI styles and returns the new string. By + default the styling is self contained which means that at the end + of the string a reset code is issued. This can be prevented by + passing ``reset=False``. 
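A minimal sketch of the progress bar described above; handle and process are hypothetical placeholder names:

import click

def handle(item):
    pass  # placeholder for the real per-item work

def process(items):
    with click.progressbar(items, label='Processing') as bar:
        for item in bar:   # iterating the bar advances and redraws it
            handle(item)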
+ + Examples:: + + click.echo(click.style('Hello World!', fg='green')) + click.echo(click.style('ATTENTION!', blink=True)) + click.echo(click.style('Some things', reverse=True, fg='cyan')) + + Supported color names: + + * ``black`` (might be a gray) + * ``red`` + * ``green`` + * ``yellow`` (might be an orange) + * ``blue`` + * ``magenta`` + * ``cyan`` + * ``white`` (might be light gray) + * ``bright_black`` + * ``bright_red`` + * ``bright_green`` + * ``bright_yellow`` + * ``bright_blue`` + * ``bright_magenta`` + * ``bright_cyan`` + * ``bright_white`` + * ``reset`` (reset the color code only) + + .. versionadded:: 2.0 + + .. versionadded:: 7.0 + Added support for bright colors. + + :param text: the string to style with ansi codes. + :param fg: if provided this will become the foreground color. + :param bg: if provided this will become the background color. + :param bold: if provided this will enable or disable bold mode. + :param dim: if provided this will enable or disable dim mode. This is + badly supported. + :param underline: if provided this will enable or disable underline. + :param blink: if provided this will enable or disable blinking. + :param reverse: if provided this will enable or disable inverse + rendering (foreground becomes background and the + other way round). + :param reset: by default a reset-all code is added at the end of the + string which means that styles do not carry over. This + can be disabled to compose styles. + """ + bits = [] + if fg: + try: + bits.append('\033[%dm' % (_ansi_colors[fg])) + except KeyError: + raise TypeError('Unknown color %r' % fg) + if bg: + try: + bits.append('\033[%dm' % (_ansi_colors[bg] + 10)) + except KeyError: + raise TypeError('Unknown color %r' % bg) + if bold is not None: + bits.append('\033[%dm' % (1 if bold else 22)) + if dim is not None: + bits.append('\033[%dm' % (2 if dim else 22)) + if underline is not None: + bits.append('\033[%dm' % (4 if underline else 24)) + if blink is not None: + bits.append('\033[%dm' % (5 if blink else 25)) + if reverse is not None: + bits.append('\033[%dm' % (7 if reverse else 27)) + bits.append(text) + if reset: + bits.append(_ansi_reset_all) + return ''.join(bits) + + +def unstyle(text): + """Removes ANSI styling information from a string. Usually it's not + necessary to use this function as Click's echo function will + automatically remove styling if necessary. + + .. versionadded:: 2.0 + + :param text: the text to remove style information from. + """ + return strip_ansi(text) + + +def secho(message=None, file=None, nl=True, err=False, color=None, **styles): + """This function combines :func:`echo` and :func:`style` into one + call. As such the following two calls are the same:: + + click.secho('Hello World!', fg='green') + click.echo(click.style('Hello World!', fg='green')) + + All keyword arguments are forwarded to the underlying functions + depending on which one they go with. + + .. versionadded:: 2.0 + """ + if message is not None: + message = style(message, **styles) + return echo(message, file=file, nl=nl, err=err, color=color) + + +def edit(text=None, editor=None, env=None, require_save=True, + extension='.txt', filename=None): + r"""Edits the given text in the defined editor. If an editor is given + (should be the full path to the executable but the regular operating + system search path is used for finding the executable) it overrides + the detected editor. Optionally, some environment variables can be + used. If the editor is closed without changes, `None` is returned. 
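A short sketch of the styling helpers above in use:

import click

click.secho('Success!', fg='green', bold=True)
click.echo(click.style('WARNING', fg='bright_yellow', reverse=True) + ' disk almost full')
# unstyle strips the ANSI codes back out of a styled string.
click.echo(click.unstyle(click.style('plain again', fg='red')))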
In + case a file is edited directly the return value is always `None` and + `require_save` and `extension` are ignored. + + If the editor cannot be opened a :exc:`UsageError` is raised. + + Note for Windows: to simplify cross-platform usage, the newlines are + automatically converted from POSIX to Windows and vice versa. As such, + the message here will have ``\n`` as newline markers. + + :param text: the text to edit. + :param editor: optionally the editor to use. Defaults to automatic + detection. + :param env: environment variables to forward to the editor. + :param require_save: if this is true, then not saving in the editor + will make the return value become `None`. + :param extension: the extension to tell the editor about. This defaults + to `.txt` but changing this might change syntax + highlighting. + :param filename: if provided it will edit this file instead of the + provided text contents. It will not use a temporary + file as an indirection in that case. + """ + from ._termui_impl import Editor + editor = Editor(editor=editor, env=env, require_save=require_save, + extension=extension) + if filename is None: + return editor.edit(text) + editor.edit_file(filename) + + +def launch(url, wait=False, locate=False): + """This function launches the given URL (or filename) in the default + viewer application for this file type. If this is an executable, it + might launch the executable in a new session. The return value is + the exit code of the launched application. Usually, ``0`` indicates + success. + + Examples:: + + click.launch('https://click.palletsprojects.com/') + click.launch('/my/downloaded/file', locate=True) + + .. versionadded:: 2.0 + + :param url: URL or filename of the thing to launch. + :param wait: waits for the program to stop. + :param locate: if this is set to `True` then instead of launching the + application associated with the URL it will attempt to + launch a file manager with the file located. This + might have weird effects if the URL does not point to + the filesystem. + """ + from ._termui_impl import open_url + return open_url(url, wait=wait, locate=locate) + + +# If this is provided, getchar() calls into this instead. This is used +# for unittesting purposes. +_getchar = None + + +def getchar(echo=False): + """Fetches a single character from the terminal and returns it. This + will always return a unicode character and under certain rare + circumstances this might return more than one character. The + situations which more than one character is returned is when for + whatever reason multiple characters end up in the terminal buffer or + standard input was not actually a terminal. + + Note that this will always read from the terminal, even if something + is piped into the standard input. + + Note for Windows: in rare cases when typing non-ASCII characters, this + function might wait for a second character and then return both at once. + This is because certain Unicode characters look like special-key markers. + + .. versionadded:: 2.0 + + :param echo: if set to `True`, the character read will also show up on + the terminal. The default is to not show it. + """ + f = _getchar + if f is None: + from ._termui_impl import getchar as f + return f(echo) + + +def raw_terminal(): + from ._termui_impl import raw_terminal as f + return f() + + +def pause(info='Press any key to continue ...', err=False): + """This command stops execution and waits for the user to press any + key to continue. This is similar to the Windows batch "pause" + command. 
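A minimal sketch of edit() and launch() used together; the URL is purely illustrative:

import click

# Opens the detected editor with a template; returns None when the user
# quits without saving, because require_save defaults to True.
message = click.edit('# Describe your change\n')
if message:
    click.launch('https://example.com/submit')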
If the program is not run through a terminal, this command + will instead do nothing. + + .. versionadded:: 2.0 + + .. versionadded:: 4.0 + Added the `err` parameter. + + :param info: the info string to print before pausing. + :param err: if set to message goes to ``stderr`` instead of + ``stdout``, the same as with echo. + """ + if not isatty(sys.stdin) or not isatty(sys.stdout): + return + try: + if info: + echo(info, nl=False, err=err) + try: + getchar() + except (KeyboardInterrupt, EOFError): + pass + finally: + if info: + echo(err=err) diff --git a/flask/venv/lib/python3.6/site-packages/click/testing.py b/flask/venv/lib/python3.6/site-packages/click/testing.py new file mode 100644 index 0000000..1b2924e --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/click/testing.py @@ -0,0 +1,374 @@ +import os +import sys +import shutil +import tempfile +import contextlib +import shlex + +from ._compat import iteritems, PY2, string_types + + +# If someone wants to vendor click, we want to ensure the +# correct package is discovered. Ideally we could use a +# relative import here but unfortunately Python does not +# support that. +clickpkg = sys.modules[__name__.rsplit('.', 1)[0]] + + +if PY2: + from cStringIO import StringIO +else: + import io + from ._compat import _find_binary_reader + + +class EchoingStdin(object): + + def __init__(self, input, output): + self._input = input + self._output = output + + def __getattr__(self, x): + return getattr(self._input, x) + + def _echo(self, rv): + self._output.write(rv) + return rv + + def read(self, n=-1): + return self._echo(self._input.read(n)) + + def readline(self, n=-1): + return self._echo(self._input.readline(n)) + + def readlines(self): + return [self._echo(x) for x in self._input.readlines()] + + def __iter__(self): + return iter(self._echo(x) for x in self._input) + + def __repr__(self): + return repr(self._input) + + +def make_input_stream(input, charset): + # Is already an input stream. + if hasattr(input, 'read'): + if PY2: + return input + rv = _find_binary_reader(input) + if rv is not None: + return rv + raise TypeError('Could not find binary reader for input stream.') + + if input is None: + input = b'' + elif not isinstance(input, bytes): + input = input.encode(charset) + if PY2: + return StringIO(input) + return io.BytesIO(input) + + +class Result(object): + """Holds the captured result of an invoked CLI script.""" + + def __init__(self, runner, stdout_bytes, stderr_bytes, exit_code, + exception, exc_info=None): + #: The runner that created the result + self.runner = runner + #: The standard output as bytes. + self.stdout_bytes = stdout_bytes + #: The standard error as bytes, or False(y) if not available + self.stderr_bytes = stderr_bytes + #: The exit code as integer. + self.exit_code = exit_code + #: The exception that happened if one did. 
+ self.exception = exception + #: The traceback + self.exc_info = exc_info + + @property + def output(self): + """The (standard) output as unicode string.""" + return self.stdout + + @property + def stdout(self): + """The standard output as unicode string.""" + return self.stdout_bytes.decode(self.runner.charset, 'replace') \ + .replace('\r\n', '\n') + + @property + def stderr(self): + """The standard error as unicode string.""" + if not self.stderr_bytes: + raise ValueError("stderr not separately captured") + return self.stderr_bytes.decode(self.runner.charset, 'replace') \ + .replace('\r\n', '\n') + + + def __repr__(self): + return '<%s %s>' % ( + type(self).__name__, + self.exception and repr(self.exception) or 'okay', + ) + + +class CliRunner(object): + """The CLI runner provides functionality to invoke a Click command line + script for unittesting purposes in a isolated environment. This only + works in single-threaded systems without any concurrency as it changes the + global interpreter state. + + :param charset: the character set for the input and output data. This is + UTF-8 by default and should not be changed currently as + the reporting to Click only works in Python 2 properly. + :param env: a dictionary with environment variables for overriding. + :param echo_stdin: if this is set to `True`, then reading from stdin writes + to stdout. This is useful for showing examples in + some circumstances. Note that regular prompts + will automatically echo the input. + :param mix_stderr: if this is set to `False`, then stdout and stderr are + preserved as independent streams. This is useful for + Unix-philosophy apps that have predictable stdout and + noisy stderr, such that each may be measured + independently + """ + + def __init__(self, charset=None, env=None, echo_stdin=False, + mix_stderr=True): + if charset is None: + charset = 'utf-8' + self.charset = charset + self.env = env or {} + self.echo_stdin = echo_stdin + self.mix_stderr = mix_stderr + + def get_default_prog_name(self, cli): + """Given a command object it will return the default program name + for it. The default is the `name` attribute or ``"root"`` if not + set. + """ + return cli.name or 'root' + + def make_env(self, overrides=None): + """Returns the environment overrides for invoking a script.""" + rv = dict(self.env) + if overrides: + rv.update(overrides) + return rv + + @contextlib.contextmanager + def isolation(self, input=None, env=None, color=False): + """A context manager that sets up the isolation for invoking of a + command line tool. This sets up stdin with the given input data + and `os.environ` with the overrides from the given dictionary. + This also rebinds some internals in Click to be mocked (like the + prompt functionality). + + This is automatically done in the :meth:`invoke` method. + + .. versionadded:: 4.0 + The ``color`` parameter was added. + + :param input: the input stream to put into sys.stdin. + :param env: the environment overrides as dictionary. + :param color: whether the output should contain color codes. The + application can still override this explicitly. 
+ """ + input = make_input_stream(input, self.charset) + + old_stdin = sys.stdin + old_stdout = sys.stdout + old_stderr = sys.stderr + old_forced_width = clickpkg.formatting.FORCED_WIDTH + clickpkg.formatting.FORCED_WIDTH = 80 + + env = self.make_env(env) + + if PY2: + bytes_output = StringIO() + if self.echo_stdin: + input = EchoingStdin(input, bytes_output) + sys.stdout = bytes_output + if not self.mix_stderr: + bytes_error = StringIO() + sys.stderr = bytes_error + else: + bytes_output = io.BytesIO() + if self.echo_stdin: + input = EchoingStdin(input, bytes_output) + input = io.TextIOWrapper(input, encoding=self.charset) + sys.stdout = io.TextIOWrapper( + bytes_output, encoding=self.charset) + if not self.mix_stderr: + bytes_error = io.BytesIO() + sys.stderr = io.TextIOWrapper( + bytes_error, encoding=self.charset) + + if self.mix_stderr: + sys.stderr = sys.stdout + + sys.stdin = input + + def visible_input(prompt=None): + sys.stdout.write(prompt or '') + val = input.readline().rstrip('\r\n') + sys.stdout.write(val + '\n') + sys.stdout.flush() + return val + + def hidden_input(prompt=None): + sys.stdout.write((prompt or '') + '\n') + sys.stdout.flush() + return input.readline().rstrip('\r\n') + + def _getchar(echo): + char = sys.stdin.read(1) + if echo: + sys.stdout.write(char) + sys.stdout.flush() + return char + + default_color = color + + def should_strip_ansi(stream=None, color=None): + if color is None: + return not default_color + return not color + + old_visible_prompt_func = clickpkg.termui.visible_prompt_func + old_hidden_prompt_func = clickpkg.termui.hidden_prompt_func + old__getchar_func = clickpkg.termui._getchar + old_should_strip_ansi = clickpkg.utils.should_strip_ansi + clickpkg.termui.visible_prompt_func = visible_input + clickpkg.termui.hidden_prompt_func = hidden_input + clickpkg.termui._getchar = _getchar + clickpkg.utils.should_strip_ansi = should_strip_ansi + + old_env = {} + try: + for key, value in iteritems(env): + old_env[key] = os.environ.get(key) + if value is None: + try: + del os.environ[key] + except Exception: + pass + else: + os.environ[key] = value + yield (bytes_output, not self.mix_stderr and bytes_error) + finally: + for key, value in iteritems(old_env): + if value is None: + try: + del os.environ[key] + except Exception: + pass + else: + os.environ[key] = value + sys.stdout = old_stdout + sys.stderr = old_stderr + sys.stdin = old_stdin + clickpkg.termui.visible_prompt_func = old_visible_prompt_func + clickpkg.termui.hidden_prompt_func = old_hidden_prompt_func + clickpkg.termui._getchar = old__getchar_func + clickpkg.utils.should_strip_ansi = old_should_strip_ansi + clickpkg.formatting.FORCED_WIDTH = old_forced_width + + def invoke(self, cli, args=None, input=None, env=None, + catch_exceptions=True, color=False, mix_stderr=False, **extra): + """Invokes a command in an isolated environment. The arguments are + forwarded directly to the command line script, the `extra` keyword + arguments are passed to the :meth:`~clickpkg.Command.main` function of + the command. + + This returns a :class:`Result` object. + + .. versionadded:: 3.0 + The ``catch_exceptions`` parameter was added. + + .. versionchanged:: 3.0 + The result object now has an `exc_info` attribute with the + traceback if available. + + .. versionadded:: 4.0 + The ``color`` parameter was added. + + :param cli: the command to invoke + :param args: the arguments to invoke. It may be given as an iterable + or a string. When given as string it will be interpreted + as a Unix shell command. 
More details at + :func:`shlex.split`. + :param input: the input data for `sys.stdin`. + :param env: the environment overrides. + :param catch_exceptions: Whether to catch any other exceptions than + ``SystemExit``. + :param extra: the keyword arguments to pass to :meth:`main`. + :param color: whether the output should contain color codes. The + application can still override this explicitly. + """ + exc_info = None + with self.isolation(input=input, env=env, color=color) as outstreams: + exception = None + exit_code = 0 + + if isinstance(args, string_types): + args = shlex.split(args) + + try: + prog_name = extra.pop("prog_name") + except KeyError: + prog_name = self.get_default_prog_name(cli) + + try: + cli.main(args=args or (), prog_name=prog_name, **extra) + except SystemExit as e: + exc_info = sys.exc_info() + exit_code = e.code + if exit_code is None: + exit_code = 0 + + if exit_code != 0: + exception = e + + if not isinstance(exit_code, int): + sys.stdout.write(str(exit_code)) + sys.stdout.write('\n') + exit_code = 1 + + except Exception as e: + if not catch_exceptions: + raise + exception = e + exit_code = 1 + exc_info = sys.exc_info() + finally: + sys.stdout.flush() + stdout = outstreams[0].getvalue() + stderr = outstreams[1] and outstreams[1].getvalue() + + return Result(runner=self, + stdout_bytes=stdout, + stderr_bytes=stderr, + exit_code=exit_code, + exception=exception, + exc_info=exc_info) + + @contextlib.contextmanager + def isolated_filesystem(self): + """A context manager that creates a temporary folder and changes + the current working directory to it for isolated filesystem tests. + """ + cwd = os.getcwd() + t = tempfile.mkdtemp() + os.chdir(t) + try: + yield t + finally: + os.chdir(cwd) + try: + shutil.rmtree(t) + except (OSError, IOError): + pass diff --git a/flask/venv/lib/python3.6/site-packages/click/types.py b/flask/venv/lib/python3.6/site-packages/click/types.py new file mode 100644 index 0000000..1f88032 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/click/types.py @@ -0,0 +1,668 @@ +import os +import stat +from datetime import datetime + +from ._compat import open_stream, text_type, filename_to_ui, \ + get_filesystem_encoding, get_streerror, _get_argv_encoding, PY2 +from .exceptions import BadParameter +from .utils import safecall, LazyFile + + +class ParamType(object): + """Helper for converting values through types. The following is + necessary for a valid type: + + * it needs a name + * it needs to pass through None unchanged + * it needs to convert from a string + * it needs to convert its result type through unchanged + (eg: needs to be idempotent) + * it needs to be able to deal with param and context being `None`. + This can be the case when the object is used with prompt + inputs. + """ + is_composite = False + + #: the descriptive name of this type + name = None + + #: if a list of this type is expected and the value is pulled from a + #: string environment variable, this is what splits it up. `None` + #: means any whitespace. For all parameters the general rule is that + #: whitespace splits them up. The exception are paths and files which + #: are split by ``os.path.pathsep`` by default (":" on Unix and ";" on + #: Windows). 
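A minimal sketch of the test runner described above, exercising a trivial command defined inline:

import click
from click.testing import CliRunner

@click.command()
@click.argument('name')
def hello(name):
    click.echo('Hello %s!' % name)

runner = CliRunner()
result = runner.invoke(hello, ['world'])
assert result.exit_code == 0
assert result.output == 'Hello world!\n'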
+ envvar_list_splitter = None + + def __call__(self, value, param=None, ctx=None): + if value is not None: + return self.convert(value, param, ctx) + + def get_metavar(self, param): + """Returns the metavar default for this param if it provides one.""" + + def get_missing_message(self, param): + """Optionally might return extra information about a missing + parameter. + + .. versionadded:: 2.0 + """ + + def convert(self, value, param, ctx): + """Converts the value. This is not invoked for values that are + `None` (the missing value). + """ + return value + + def split_envvar_value(self, rv): + """Given a value from an environment variable this splits it up + into small chunks depending on the defined envvar list splitter. + + If the splitter is set to `None`, which means that whitespace splits, + then leading and trailing whitespace is ignored. Otherwise, leading + and trailing splitters usually lead to empty items being included. + """ + return (rv or '').split(self.envvar_list_splitter) + + def fail(self, message, param=None, ctx=None): + """Helper method to fail with an invalid value message.""" + raise BadParameter(message, ctx=ctx, param=param) + + +class CompositeParamType(ParamType): + is_composite = True + + @property + def arity(self): + raise NotImplementedError() + + +class FuncParamType(ParamType): + + def __init__(self, func): + self.name = func.__name__ + self.func = func + + def convert(self, value, param, ctx): + try: + return self.func(value) + except ValueError: + try: + value = text_type(value) + except UnicodeError: + value = str(value).decode('utf-8', 'replace') + self.fail(value, param, ctx) + + +class UnprocessedParamType(ParamType): + name = 'text' + + def convert(self, value, param, ctx): + return value + + def __repr__(self): + return 'UNPROCESSED' + + +class StringParamType(ParamType): + name = 'text' + + def convert(self, value, param, ctx): + if isinstance(value, bytes): + enc = _get_argv_encoding() + try: + value = value.decode(enc) + except UnicodeError: + fs_enc = get_filesystem_encoding() + if fs_enc != enc: + try: + value = value.decode(fs_enc) + except UnicodeError: + value = value.decode('utf-8', 'replace') + return value + return value + + def __repr__(self): + return 'STRING' + + +class Choice(ParamType): + """The choice type allows a value to be checked against a fixed set + of supported values. All of these values have to be strings. + + You should only pass a list or tuple of choices. Other iterables + (like generators) may lead to surprising results. + + See :ref:`choice-opts` for an example. + + :param case_sensitive: Set to false to make choices case + insensitive. Defaults to true. + """ + + name = 'choice' + + def __init__(self, choices, case_sensitive=True): + self.choices = choices + self.case_sensitive = case_sensitive + + def get_metavar(self, param): + return '[%s]' % '|'.join(self.choices) + + def get_missing_message(self, param): + return 'Choose from:\n\t%s.' 
% ',\n\t'.join(self.choices) + + def convert(self, value, param, ctx): + # Exact match + if value in self.choices: + return value + + # Match through normalization and case sensitivity + # first do token_normalize_func, then lowercase + # preserve original `value` to produce an accurate message in + # `self.fail` + normed_value = value + normed_choices = self.choices + + if ctx is not None and \ + ctx.token_normalize_func is not None: + normed_value = ctx.token_normalize_func(value) + normed_choices = [ctx.token_normalize_func(choice) for choice in + self.choices] + + if not self.case_sensitive: + normed_value = normed_value.lower() + normed_choices = [choice.lower() for choice in normed_choices] + + if normed_value in normed_choices: + return normed_value + + self.fail('invalid choice: %s. (choose from %s)' % + (value, ', '.join(self.choices)), param, ctx) + + def __repr__(self): + return 'Choice(%r)' % list(self.choices) + + +class DateTime(ParamType): + """The DateTime type converts date strings into `datetime` objects. + + The format strings which are checked are configurable, but default to some + common (non-timezone aware) ISO 8601 formats. + + When specifying *DateTime* formats, you should only pass a list or a tuple. + Other iterables, like generators, may lead to surprising results. + + The format strings are processed using ``datetime.strptime``, and this + consequently defines the format strings which are allowed. + + Parsing is tried using each format, in order, and the first format which + parses successfully is used. + + :param formats: A list or tuple of date format strings, in the order in + which they should be tried. Defaults to + ``'%Y-%m-%d'``, ``'%Y-%m-%dT%H:%M:%S'``, + ``'%Y-%m-%d %H:%M:%S'``. + """ + name = 'datetime' + + def __init__(self, formats=None): + self.formats = formats or [ + '%Y-%m-%d', + '%Y-%m-%dT%H:%M:%S', + '%Y-%m-%d %H:%M:%S' + ] + + def get_metavar(self, param): + return '[{}]'.format('|'.join(self.formats)) + + def _try_to_convert_date(self, value, format): + try: + return datetime.strptime(value, format) + except ValueError: + return None + + def convert(self, value, param, ctx): + # Exact match + for format in self.formats: + dtime = self._try_to_convert_date(value, format) + if dtime: + return dtime + + self.fail( + 'invalid datetime format: {}. (choose from {})'.format( + value, ', '.join(self.formats))) + + def __repr__(self): + return 'DateTime' + + +class IntParamType(ParamType): + name = 'integer' + + def convert(self, value, param, ctx): + try: + return int(value) + except (ValueError, UnicodeError): + self.fail('%s is not a valid integer' % value, param, ctx) + + def __repr__(self): + return 'INT' + + +class IntRange(IntParamType): + """A parameter that works similar to :data:`click.INT` but restricts + the value to fit into a range. The default behavior is to fail if the + value falls outside the range, but it can also be silently clamped + between the two edges. + + See :ref:`ranges` for an example. 
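A short sketch of the Choice and DateTime types above wired into options of a hypothetical command:

import click

@click.command()
@click.option('--hash-type', type=click.Choice(['md5', 'sha1'], case_sensitive=False))
@click.option('--since', type=click.DateTime(formats=['%Y-%m-%d']))
def digest(hash_type, since):
    # hash_type is validated against the choices; since is parsed to datetime.
    click.echo('%s since %s' % (hash_type, since))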
+ """ + name = 'integer range' + + def __init__(self, min=None, max=None, clamp=False): + self.min = min + self.max = max + self.clamp = clamp + + def convert(self, value, param, ctx): + rv = IntParamType.convert(self, value, param, ctx) + if self.clamp: + if self.min is not None and rv < self.min: + return self.min + if self.max is not None and rv > self.max: + return self.max + if self.min is not None and rv < self.min or \ + self.max is not None and rv > self.max: + if self.min is None: + self.fail('%s is bigger than the maximum valid value ' + '%s.' % (rv, self.max), param, ctx) + elif self.max is None: + self.fail('%s is smaller than the minimum valid value ' + '%s.' % (rv, self.min), param, ctx) + else: + self.fail('%s is not in the valid range of %s to %s.' + % (rv, self.min, self.max), param, ctx) + return rv + + def __repr__(self): + return 'IntRange(%r, %r)' % (self.min, self.max) + + +class FloatParamType(ParamType): + name = 'float' + + def convert(self, value, param, ctx): + try: + return float(value) + except (UnicodeError, ValueError): + self.fail('%s is not a valid floating point value' % + value, param, ctx) + + def __repr__(self): + return 'FLOAT' + + +class FloatRange(FloatParamType): + """A parameter that works similar to :data:`click.FLOAT` but restricts + the value to fit into a range. The default behavior is to fail if the + value falls outside the range, but it can also be silently clamped + between the two edges. + + See :ref:`ranges` for an example. + """ + name = 'float range' + + def __init__(self, min=None, max=None, clamp=False): + self.min = min + self.max = max + self.clamp = clamp + + def convert(self, value, param, ctx): + rv = FloatParamType.convert(self, value, param, ctx) + if self.clamp: + if self.min is not None and rv < self.min: + return self.min + if self.max is not None and rv > self.max: + return self.max + if self.min is not None and rv < self.min or \ + self.max is not None and rv > self.max: + if self.min is None: + self.fail('%s is bigger than the maximum valid value ' + '%s.' % (rv, self.max), param, ctx) + elif self.max is None: + self.fail('%s is smaller than the minimum valid value ' + '%s.' % (rv, self.min), param, ctx) + else: + self.fail('%s is not in the valid range of %s to %s.' + % (rv, self.min, self.max), param, ctx) + return rv + + def __repr__(self): + return 'FloatRange(%r, %r)' % (self.min, self.max) + + +class BoolParamType(ParamType): + name = 'boolean' + + def convert(self, value, param, ctx): + if isinstance(value, bool): + return bool(value) + value = value.lower() + if value in ('true', 't', '1', 'yes', 'y'): + return True + elif value in ('false', 'f', '0', 'no', 'n'): + return False + self.fail('%s is not a valid boolean' % value, param, ctx) + + def __repr__(self): + return 'BOOL' + + +class UUIDParameterType(ParamType): + name = 'uuid' + + def convert(self, value, param, ctx): + import uuid + try: + if PY2 and isinstance(value, text_type): + value = value.encode('ascii') + return uuid.UUID(value) + except (UnicodeError, ValueError): + self.fail('%s is not a valid UUID value' % value, param, ctx) + + def __repr__(self): + return 'UUID' + + +class File(ParamType): + """Declares a parameter to be a file for reading or writing. The file + is automatically closed once the context tears down (after the command + finished working). + + Files can be opened for reading or writing. The special value ``-`` + indicates stdin or stdout depending on the mode. 
+ + By default, the file is opened for reading text data, but it can also be + opened in binary mode or for writing. The encoding parameter can be used + to force a specific encoding. + + The `lazy` flag controls if the file should be opened immediately or upon + first IO. The default is to be non-lazy for standard input and output + streams as well as files opened for reading, `lazy` otherwise. When opening a + file lazily for reading, it is still opened temporarily for validation, but + will not be held open until first IO. lazy is mainly useful when opening + for writing to avoid creating the file until it is needed. + + Starting with Click 2.0, files can also be opened atomically in which + case all writes go into a separate file in the same folder and upon + completion the file will be moved over to the original location. This + is useful if a file regularly read by other users is modified. + + See :ref:`file-args` for more information. + """ + name = 'filename' + envvar_list_splitter = os.path.pathsep + + def __init__(self, mode='r', encoding=None, errors='strict', lazy=None, + atomic=False): + self.mode = mode + self.encoding = encoding + self.errors = errors + self.lazy = lazy + self.atomic = atomic + + def resolve_lazy_flag(self, value): + if self.lazy is not None: + return self.lazy + if value == '-': + return False + elif 'w' in self.mode: + return True + return False + + def convert(self, value, param, ctx): + try: + if hasattr(value, 'read') or hasattr(value, 'write'): + return value + + lazy = self.resolve_lazy_flag(value) + + if lazy: + f = LazyFile(value, self.mode, self.encoding, self.errors, + atomic=self.atomic) + if ctx is not None: + ctx.call_on_close(f.close_intelligently) + return f + + f, should_close = open_stream(value, self.mode, + self.encoding, self.errors, + atomic=self.atomic) + # If a context is provided, we automatically close the file + # at the end of the context execution (or flush out). If a + # context does not exist, it's the caller's responsibility to + # properly close the file. This for instance happens when the + # type is used with prompts. + if ctx is not None: + if should_close: + ctx.call_on_close(safecall(f.close)) + else: + ctx.call_on_close(safecall(f.flush)) + return f + except (IOError, OSError) as e: + self.fail('Could not open file: %s: %s' % ( + filename_to_ui(value), + get_streerror(e), + ), param, ctx) + + +class Path(ParamType): + """The path type is similar to the :class:`File` type but it performs + different checks. First of all, instead of returning an open file + handle it returns just the filename. Secondly, it can perform various + basic checks about what the file or directory should be. + + .. versionchanged:: 6.0 + `allow_dash` was added. + + :param exists: if set to true, the file or directory needs to exist for + this value to be valid. If this is not required and a + file does indeed not exist, then all further checks are + silently skipped. + :param file_okay: controls if a file is a possible value. + :param dir_okay: controls if a directory is a possible value. + :param writable: if true, a writable check is performed. + :param readable: if true, a readable check is performed. + :param resolve_path: if this is true, then the path is fully resolved + before the value is passed onwards. This means + that it's absolute and symlinks are resolved. It + will not expand a tilde-prefix, as this is + supposed to be done by the shell only. 
+ :param allow_dash: If this is set to `True`, a single dash to indicate + standard streams is permitted. + :param path_type: optionally a string type that should be used to + represent the path. The default is `None` which + means the return value will be either bytes or + unicode depending on what makes most sense given the + input data Click deals with. + """ + envvar_list_splitter = os.path.pathsep + + def __init__(self, exists=False, file_okay=True, dir_okay=True, + writable=False, readable=True, resolve_path=False, + allow_dash=False, path_type=None): + self.exists = exists + self.file_okay = file_okay + self.dir_okay = dir_okay + self.writable = writable + self.readable = readable + self.resolve_path = resolve_path + self.allow_dash = allow_dash + self.type = path_type + + if self.file_okay and not self.dir_okay: + self.name = 'file' + self.path_type = 'File' + elif self.dir_okay and not self.file_okay: + self.name = 'directory' + self.path_type = 'Directory' + else: + self.name = 'path' + self.path_type = 'Path' + + def coerce_path_result(self, rv): + if self.type is not None and not isinstance(rv, self.type): + if self.type is text_type: + rv = rv.decode(get_filesystem_encoding()) + else: + rv = rv.encode(get_filesystem_encoding()) + return rv + + def convert(self, value, param, ctx): + rv = value + + is_dash = self.file_okay and self.allow_dash and rv in (b'-', '-') + + if not is_dash: + if self.resolve_path: + rv = os.path.realpath(rv) + + try: + st = os.stat(rv) + except OSError: + if not self.exists: + return self.coerce_path_result(rv) + self.fail('%s "%s" does not exist.' % ( + self.path_type, + filename_to_ui(value) + ), param, ctx) + + if not self.file_okay and stat.S_ISREG(st.st_mode): + self.fail('%s "%s" is a file.' % ( + self.path_type, + filename_to_ui(value) + ), param, ctx) + if not self.dir_okay and stat.S_ISDIR(st.st_mode): + self.fail('%s "%s" is a directory.' % ( + self.path_type, + filename_to_ui(value) + ), param, ctx) + if self.writable and not os.access(value, os.W_OK): + self.fail('%s "%s" is not writable.' % ( + self.path_type, + filename_to_ui(value) + ), param, ctx) + if self.readable and not os.access(value, os.R_OK): + self.fail('%s "%s" is not readable.' % ( + self.path_type, + filename_to_ui(value) + ), param, ctx) + + return self.coerce_path_result(rv) + + +class Tuple(CompositeParamType): + """The default behavior of Click is to apply a type on a value directly. + This works well in most cases, except for when `nargs` is set to a fixed + count and different types should be used for different items. In this + case the :class:`Tuple` type can be used. This type can only be used + if `nargs` is set to a fixed number. + + For more information see :ref:`tuple-type`. + + This can be selected by using a Python tuple literal as a type. + + :param types: a list of types that should be used for the tuple items. + """ + + def __init__(self, types): + self.types = [convert_type(ty) for ty in types] + + @property + def name(self): + return "<" + " ".join(ty.name for ty in self.types) + ">" + + @property + def arity(self): + return len(self.types) + + def convert(self, value, param, ctx): + if len(value) != len(self.types): + raise TypeError('It would appear that nargs is set to conflict ' + 'with the composite type arity.') + return tuple(ty(x, param, ctx) for ty, x in zip(self.types, value)) + + +def convert_type(ty, default=None): + """Converts a callable or python ty into the most appropriate param + ty. 
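A short sketch combining the File and Path types described above in a hypothetical copy command:

import click

@click.command()
@click.argument('src', type=click.File('r'))
@click.argument('dst', type=click.Path(dir_okay=False, writable=True))
def copy(src, dst):
    # '-' as SRC reads from stdin because of the File type.
    with open(dst, 'w') as out:
        out.write(src.read())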
+ """ + guessed_type = False + if ty is None and default is not None: + if isinstance(default, tuple): + ty = tuple(map(type, default)) + else: + ty = type(default) + guessed_type = True + + if isinstance(ty, tuple): + return Tuple(ty) + if isinstance(ty, ParamType): + return ty + if ty is text_type or ty is str or ty is None: + return STRING + if ty is int: + return INT + # Booleans are only okay if not guessed. This is done because for + # flags the default value is actually a bit of a lie in that it + # indicates which of the flags is the one we want. See get_default() + # for more information. + if ty is bool and not guessed_type: + return BOOL + if ty is float: + return FLOAT + if guessed_type: + return STRING + + # Catch a common mistake + if __debug__: + try: + if issubclass(ty, ParamType): + raise AssertionError('Attempted to use an uninstantiated ' + 'parameter type (%s).' % ty) + except TypeError: + pass + return FuncParamType(ty) + + +#: A dummy parameter type that just does nothing. From a user's +#: perspective this appears to just be the same as `STRING` but internally +#: no string conversion takes place. This is necessary to achieve the +#: same bytes/unicode behavior on Python 2/3 in situations where you want +#: to not convert argument types. This is usually useful when working +#: with file paths as they can appear in bytes and unicode. +#: +#: For path related uses the :class:`Path` type is a better choice but +#: there are situations where an unprocessed type is useful which is why +#: it is is provided. +#: +#: .. versionadded:: 4.0 +UNPROCESSED = UnprocessedParamType() + +#: A unicode string parameter type which is the implicit default. This +#: can also be selected by using ``str`` as type. +STRING = StringParamType() + +#: An integer parameter. This can also be selected by using ``int`` as +#: type. +INT = IntParamType() + +#: A floating point value parameter. This can also be selected by using +#: ``float`` as type. +FLOAT = FloatParamType() + +#: A boolean parameter. This is the default for boolean flags. This can +#: also be selected by using ``bool`` as a type. +BOOL = BoolParamType() + +#: A UUID parameter. 
+UUID = UUIDParameterType() diff --git a/flask/venv/lib/python3.6/site-packages/click/utils.py b/flask/venv/lib/python3.6/site-packages/click/utils.py new file mode 100644 index 0000000..fc84369 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/click/utils.py @@ -0,0 +1,440 @@ +import os +import sys + +from .globals import resolve_color_default + +from ._compat import text_type, open_stream, get_filesystem_encoding, \ + get_streerror, string_types, PY2, binary_streams, text_streams, \ + filename_to_ui, auto_wrap_for_ansi, strip_ansi, should_strip_ansi, \ + _default_text_stdout, _default_text_stderr, is_bytes, WIN + +if not PY2: + from ._compat import _find_binary_writer +elif WIN: + from ._winconsole import _get_windows_argv, \ + _hash_py_argv, _initial_argv_hash + + +echo_native_types = string_types + (bytes, bytearray) + + +def _posixify(name): + return '-'.join(name.split()).lower() + + +def safecall(func): + """Wraps a function so that it swallows exceptions.""" + def wrapper(*args, **kwargs): + try: + return func(*args, **kwargs) + except Exception: + pass + return wrapper + + +def make_str(value): + """Converts a value into a valid string.""" + if isinstance(value, bytes): + try: + return value.decode(get_filesystem_encoding()) + except UnicodeError: + return value.decode('utf-8', 'replace') + return text_type(value) + + +def make_default_short_help(help, max_length=45): + """Return a condensed version of help string.""" + words = help.split() + total_length = 0 + result = [] + done = False + + for word in words: + if word[-1:] == '.': + done = True + new_length = result and 1 + len(word) or len(word) + if total_length + new_length > max_length: + result.append('...') + done = True + else: + if result: + result.append(' ') + result.append(word) + if done: + break + total_length += new_length + + return ''.join(result) + + +class LazyFile(object): + """A lazy file works like a regular file but it does not fully open + the file but it does perform some basic checks early to see if the + filename parameter does make sense. This is useful for safely opening + files for writing. + """ + + def __init__(self, filename, mode='r', encoding=None, errors='strict', + atomic=False): + self.name = filename + self.mode = mode + self.encoding = encoding + self.errors = errors + self.atomic = atomic + + if filename == '-': + self._f, self.should_close = open_stream(filename, mode, + encoding, errors) + else: + if 'r' in mode: + # Open and close the file in case we're opening it for + # reading so that we can catch at least some errors in + # some cases early. + open(filename, mode).close() + self._f = None + self.should_close = True + + def __getattr__(self, name): + return getattr(self.open(), name) + + def __repr__(self): + if self._f is not None: + return repr(self._f) + return '<unopened file %r %s>' % (self.name, self.mode) + + def open(self): + """Opens the file if it's not yet open. This call might fail with + a :exc:`FileError`. Not handling this error will produce an error + that Click shows.
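For orientation, a small sketch of the lazy-open behaviour documented above, driving ``LazyFile`` directly rather than through the ``File`` type (the file name is invented):

    from click.utils import LazyFile

    lf = LazyFile('report.txt', 'w')   # nothing is created on disk yet
    lf.write('one line\n')             # first attribute access triggers open()
    lf.close_intelligently()           # closed here because LazyFile opened it

For ``'-'`` (stdin/stdout) ``should_close`` stays ``False``, so ``close_intelligently()`` leaves the underlying stream open.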
+ """ + if self._f is not None: + return self._f + try: + rv, self.should_close = open_stream(self.name, self.mode, + self.encoding, + self.errors, + atomic=self.atomic) + except (IOError, OSError) as e: + from .exceptions import FileError + raise FileError(self.name, hint=get_streerror(e)) + self._f = rv + return rv + + def close(self): + """Closes the underlying file, no matter what.""" + if self._f is not None: + self._f.close() + + def close_intelligently(self): + """This function only closes the file if it was opened by the lazy + file wrapper. For instance this will never close stdin. + """ + if self.should_close: + self.close() + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, tb): + self.close_intelligently() + + def __iter__(self): + self.open() + return iter(self._f) + + +class KeepOpenFile(object): + + def __init__(self, file): + self._file = file + + def __getattr__(self, name): + return getattr(self._file, name) + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, tb): + pass + + def __repr__(self): + return repr(self._file) + + def __iter__(self): + return iter(self._file) + + +def echo(message=None, file=None, nl=True, err=False, color=None): + """Prints a message plus a newline to the given file or stdout. On + first sight, this looks like the print function, but it has improved + support for handling Unicode and binary data that does not fail no + matter how badly configured the system is. + + Primarily it means that you can print binary data as well as Unicode + data on both 2.x and 3.x to the given file in the most appropriate way + possible. This is a very carefree function in that it will try its + best to not fail. As of Click 6.0 this includes support for unicode + output on the Windows console. + + In addition to that, if `colorama`_ is installed, the echo function will + also support clever handling of ANSI codes. Essentially it will then + do the following: + + - add transparent handling of ANSI color codes on Windows. + - hide ANSI codes automatically if the destination file is not a + terminal. + + .. _colorama: https://pypi.org/project/colorama/ + + .. versionchanged:: 6.0 + As of Click 6.0 the echo function will properly support unicode + output on the windows console. Not that click does not modify + the interpreter in any way which means that `sys.stdout` or the + print statement or function will still not provide unicode support. + + .. versionchanged:: 2.0 + Starting with version 2.0 of Click, the echo function will work + with colorama if it's installed. + + .. versionadded:: 3.0 + The `err` parameter was added. + + .. versionchanged:: 4.0 + Added the `color` flag. + + :param message: the message to print + :param file: the file to write to (defaults to ``stdout``) + :param err: if set to true the file defaults to ``stderr`` instead of + ``stdout``. This is faster and easier than calling + :func:`get_text_stderr` yourself. + :param nl: if set to `True` (the default) a newline is printed afterwards. + :param color: controls if the terminal supports ANSI colors or not. The + default is autodetection. + """ + if file is None: + if err: + file = _default_text_stderr() + else: + file = _default_text_stdout() + + # Convert non bytes/text into the native string type. 
+ if message is not None and not isinstance(message, echo_native_types): + message = text_type(message) + + if nl: + message = message or u'' + if isinstance(message, text_type): + message += u'\n' + else: + message += b'\n' + + # If there is a message, and we're in Python 3, and the value looks + # like bytes, we manually need to find the binary stream and write the + # message in there. This is done separately so that most stream + # types will work as you would expect. Eg: you can write to StringIO + # for other cases. + if message and not PY2 and is_bytes(message): + binary_file = _find_binary_writer(file) + if binary_file is not None: + file.flush() + binary_file.write(message) + binary_file.flush() + return + + # ANSI-style support. If there is no message or we are dealing with + # bytes nothing is happening. If we are connected to a file we want + # to strip colors. If we are on windows we either wrap the stream + # to strip the color or we use the colorama support to translate the + # ansi codes to API calls. + if message and not is_bytes(message): + color = resolve_color_default(color) + if should_strip_ansi(file, color): + message = strip_ansi(message) + elif WIN: + if auto_wrap_for_ansi is not None: + file = auto_wrap_for_ansi(file) + elif not color: + message = strip_ansi(message) + + if message: + file.write(message) + file.flush() + + +def get_binary_stream(name): + """Returns a system stream for byte processing. This essentially + returns the stream from the sys module with the given name but it + solves some compatibility issues between different Python versions. + Primarily this function is necessary for getting binary streams on + Python 3. + + :param name: the name of the stream to open. Valid names are ``'stdin'``, + ``'stdout'`` and ``'stderr'`` + """ + opener = binary_streams.get(name) + if opener is None: + raise TypeError('Unknown standard stream %r' % name) + return opener() + + +def get_text_stream(name, encoding=None, errors='strict'): + """Returns a system stream for text processing. This usually returns + a wrapped stream around a binary stream returned from + :func:`get_binary_stream` but it also can take shortcuts on Python 3 + for already correctly configured streams. + + :param name: the name of the stream to open. Valid names are ``'stdin'``, + ``'stdout'`` and ``'stderr'`` + :param encoding: overrides the detected default encoding. + :param errors: overrides the default error mode. + """ + opener = text_streams.get(name) + if opener is None: + raise TypeError('Unknown standard stream %r' % name) + return opener(encoding, errors) + + +def open_file(filename, mode='r', encoding=None, errors='strict', + lazy=False, atomic=False): + """This is similar to how the :class:`File` works but for manual + usage. Files are opened non lazy by default. This can open regular + files as well as stdin/stdout if ``'-'`` is passed. + + If stdin/stdout is returned the stream is wrapped so that the context + manager will not close the stream accidentally. This makes it possible + to always use the function like this without having to worry to + accidentally close a standard stream:: + + with open_file(filename) as f: + ... + + .. versionadded:: 3.0 + + :param filename: the name of the file to open (or ``'-'`` for stdin/stdout). + :param mode: the mode in which to open the file. + :param encoding: the encoding to use. + :param errors: the error handling for this file. + :param lazy: can be flipped to true to open the file lazily. 
+ :param atomic: in atomic mode writes go into a temporary file and it's + moved on close. + """ + if lazy: + return LazyFile(filename, mode, encoding, errors, atomic=atomic) + f, should_close = open_stream(filename, mode, encoding, errors, + atomic=atomic) + if not should_close: + f = KeepOpenFile(f) + return f + + +def get_os_args(): + """This returns the argument part of sys.argv in the most appropriate + form for processing. What this means is that this return value is in + a format that works for Click to process but does not necessarily + correspond well to what's actually standard for the interpreter. + + On most environments the return value is ``sys.argv[:1]`` unchanged. + However if you are on Windows and running Python 2 the return value + will actually be a list of unicode strings instead because the + default behavior on that platform otherwise will not be able to + carry all possible values that sys.argv can have. + + .. versionadded:: 6.0 + """ + # We can only extract the unicode argv if sys.argv has not been + # changed since the startup of the application. + if PY2 and WIN and _initial_argv_hash == _hash_py_argv(): + return _get_windows_argv() + return sys.argv[1:] + + +def format_filename(filename, shorten=False): + """Formats a filename for user display. The main purpose of this + function is to ensure that the filename can be displayed at all. This + will decode the filename to unicode if necessary in a way that it will + not fail. Optionally, it can shorten the filename to not include the + full path to the filename. + + :param filename: formats a filename for UI display. This will also convert + the filename into unicode without failing. + :param shorten: this optionally shortens the filename to strip of the + path that leads up to it. + """ + if shorten: + filename = os.path.basename(filename) + return filename_to_ui(filename) + + +def get_app_dir(app_name, roaming=True, force_posix=False): + r"""Returns the config folder for the application. The default behavior + is to return whatever is most appropriate for the operating system. + + To give you an idea, for an app called ``"Foo Bar"``, something like + the following folders could be returned: + + Mac OS X: + ``~/Library/Application Support/Foo Bar`` + Mac OS X (POSIX): + ``~/.foo-bar`` + Unix: + ``~/.config/foo-bar`` + Unix (POSIX): + ``~/.foo-bar`` + Win XP (roaming): + ``C:\Documents and Settings\\Local Settings\Application Data\Foo Bar`` + Win XP (not roaming): + ``C:\Documents and Settings\\Application Data\Foo Bar`` + Win 7 (roaming): + ``C:\Users\\AppData\Roaming\Foo Bar`` + Win 7 (not roaming): + ``C:\Users\\AppData\Local\Foo Bar`` + + .. versionadded:: 2.0 + + :param app_name: the application name. This should be properly capitalized + and can contain whitespace. + :param roaming: controls if the folder should be roaming or not on Windows. + Has no affect otherwise. + :param force_posix: if this is set to `True` then on any POSIX system the + folder will be stored in the home folder with a leading + dot instead of the XDG config home or darwin's + application support folder. + """ + if WIN: + key = roaming and 'APPDATA' or 'LOCALAPPDATA' + folder = os.environ.get(key) + if folder is None: + folder = os.path.expanduser('~') + return os.path.join(folder, app_name) + if force_posix: + return os.path.join(os.path.expanduser('~/.' 
+ _posixify(app_name))) + if sys.platform == 'darwin': + return os.path.join(os.path.expanduser( + '~/Library/Application Support'), app_name) + return os.path.join( + os.environ.get('XDG_CONFIG_HOME', os.path.expanduser('~/.config')), + _posixify(app_name)) + + +class PacifyFlushWrapper(object): + """This wrapper is used to catch and suppress BrokenPipeErrors resulting + from ``.flush()`` being called on broken pipe during the shutdown/final-GC + of the Python interpreter. Notably ``.flush()`` is always called on + ``sys.stdout`` and ``sys.stderr``. So as to have minimal impact on any + other cleanup code, and the case where the underlying file is not a broken + pipe, all calls and attributes are proxied. + """ + + def __init__(self, wrapped): + self.wrapped = wrapped + + def flush(self): + try: + self.wrapped.flush() + except IOError as e: + import errno + if e.errno != errno.EPIPE: + raise + + def __getattr__(self, attr): + return getattr(self.wrapped, attr) diff --git a/flask/venv/lib/python3.6/site-packages/dotenv/__init__.py b/flask/venv/lib/python3.6/site-packages/dotenv/__init__.py new file mode 100644 index 0000000..50f27cd --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/dotenv/__init__.py @@ -0,0 +1,40 @@ +from .main import load_dotenv, get_key, set_key, unset_key, find_dotenv, dotenv_values + + +def load_ipython_extension(ipython): + from .ipython import load_ipython_extension + load_ipython_extension(ipython) + + +def get_cli_string(path=None, action=None, key=None, value=None, quote=None): + """Returns a string suitable for running as a shell script. + + Useful for converting a arguments passed to a fabric task + to be passed to a `local` or `run` command. + """ + command = ['dotenv'] + if quote: + command.append('-q %s' % quote) + if path: + command.append('-f %s' % path) + if action: + command.append(action) + if key: + command.append(key) + if value: + if ' ' in value: + command.append('"%s"' % value) + else: + command.append(value) + + return ' '.join(command).strip() + + +__all__ = ['get_cli_string', + 'load_dotenv', + 'dotenv_values', + 'get_key', + 'set_key', + 'unset_key', + 'find_dotenv', + 'load_ipython_extension'] diff --git a/flask/venv/lib/python3.6/site-packages/dotenv/__pycache__/__init__.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/dotenv/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..37a5a4f Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/dotenv/__pycache__/__init__.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/dotenv/__pycache__/cli.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/dotenv/__pycache__/cli.cpython-36.pyc new file mode 100644 index 0000000..40415d1 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/dotenv/__pycache__/cli.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/dotenv/__pycache__/compat.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/dotenv/__pycache__/compat.cpython-36.pyc new file mode 100644 index 0000000..39468b3 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/dotenv/__pycache__/compat.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/dotenv/__pycache__/environ.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/dotenv/__pycache__/environ.cpython-36.pyc new file mode 100644 index 0000000..d7aff3f Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/dotenv/__pycache__/environ.cpython-36.pyc differ diff --git 
a/flask/venv/lib/python3.6/site-packages/dotenv/__pycache__/ipython.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/dotenv/__pycache__/ipython.cpython-36.pyc new file mode 100644 index 0000000..f9af16f Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/dotenv/__pycache__/ipython.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/dotenv/__pycache__/main.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/dotenv/__pycache__/main.cpython-36.pyc new file mode 100644 index 0000000..33bb694 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/dotenv/__pycache__/main.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/dotenv/__pycache__/version.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/dotenv/__pycache__/version.cpython-36.pyc new file mode 100644 index 0000000..5ab2a01 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/dotenv/__pycache__/version.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/dotenv/cli.py b/flask/venv/lib/python3.6/site-packages/dotenv/cli.py new file mode 100644 index 0000000..4e03c12 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/dotenv/cli.py @@ -0,0 +1,98 @@ +import os +import sys + +try: + import click +except ImportError: + sys.stderr.write('It seems python-dotenv is not installed with cli option. \n' + 'Run pip install "python-dotenv[cli]" to fix this.') + sys.exit(1) + +from .main import dotenv_values, get_key, set_key, unset_key, run_command +from .version import __version__ + + +@click.group() +@click.option('-f', '--file', default=os.path.join(os.getcwd(), '.env'), + type=click.Path(exists=True), + help="Location of the .env file, defaults to .env file in current working directory.") +@click.option('-q', '--quote', default='always', + type=click.Choice(['always', 'never', 'auto']), + help="Whether to quote or not the variable values. Default mode is always. 
This does not affect parsing.") +@click.version_option(version=__version__) +@click.pass_context +def cli(ctx, file, quote): + '''This script is used to set, get or unset values from a .env file.''' + ctx.obj = {} + ctx.obj['FILE'] = file + ctx.obj['QUOTE'] = quote + + +@cli.command() +@click.pass_context +def list(ctx): + '''Display all the stored key/value.''' + file = ctx.obj['FILE'] + dotenv_as_dict = dotenv_values(file) + for k, v in dotenv_as_dict.items(): + click.echo('%s=%s' % (k, v)) + + +@cli.command() +@click.pass_context +@click.argument('key', required=True) +@click.argument('value', required=True) +def set(ctx, key, value): + '''Store the given key/value.''' + file = ctx.obj['FILE'] + quote = ctx.obj['QUOTE'] + success, key, value = set_key(file, key, value, quote) + if success: + click.echo('%s=%s' % (key, value)) + else: + exit(1) + + +@cli.command() +@click.pass_context +@click.argument('key', required=True) +def get(ctx, key): + '''Retrieve the value for the given key.''' + file = ctx.obj['FILE'] + stored_value = get_key(file, key) + if stored_value: + click.echo('%s=%s' % (key, stored_value)) + else: + exit(1) + + +@cli.command() +@click.pass_context +@click.argument('key', required=True) +def unset(ctx, key): + '''Removes the given key.''' + file = ctx.obj['FILE'] + quote = ctx.obj['QUOTE'] + success, key = unset_key(file, key, quote) + if success: + click.echo("Successfully removed %s" % key) + else: + exit(1) + + +@cli.command(context_settings={'ignore_unknown_options': True}) +@click.pass_context +@click.argument('commandline', nargs=-1, type=click.UNPROCESSED) +def run(ctx, commandline): + """Run command with environment variables present.""" + file = ctx.obj['FILE'] + dotenv_as_dict = dotenv_values(file) + if not commandline: + click.echo('No command given.') + exit(1) + ret = run_command(commandline, dotenv_as_dict) + exit(ret) + + +if __name__ == "__main__": + cli() diff --git a/flask/venv/lib/python3.6/site-packages/dotenv/compat.py b/flask/venv/lib/python3.6/site-packages/dotenv/compat.py new file mode 100644 index 0000000..f6baa36 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/dotenv/compat.py @@ -0,0 +1,9 @@ +import sys +try: + from StringIO import StringIO # noqa +except ImportError: + from io import StringIO # noqa + +PY2 = sys.version_info[0] == 2 +WIN = sys.platform.startswith('win') +text_type = unicode if PY2 else str # noqa diff --git a/flask/venv/lib/python3.6/site-packages/dotenv/environ.py b/flask/venv/lib/python3.6/site-packages/dotenv/environ.py new file mode 100644 index 0000000..ad35716 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/dotenv/environ.py @@ -0,0 +1,54 @@ +import os + + +class UndefinedValueError(Exception): + pass + + +class Undefined(object): + """Class to represent undefined type. """ + pass + + +# Reference instance to represent undefined values +undefined = Undefined() + + +def _cast_boolean(value): + """ + Helper to convert config values to boolean as ConfigParser do. + """ + _BOOLEANS = {'1': True, 'yes': True, 'true': True, 'on': True, + '0': False, 'no': False, 'false': False, 'off': False, '': False} + value = str(value) + if value.lower() not in _BOOLEANS: + raise ValueError('Not a boolean: %s' % value) + + return _BOOLEANS[value.lower()] + + +def getenv(option, default=undefined, cast=undefined): + """ + Return the value for option or default if defined. + """ + + # We can't avoid __contains__ because value may be empty. 
+ if option in os.environ: + value = os.environ[option] + else: + if isinstance(default, Undefined): + raise UndefinedValueError('{} not found. Declare it as envvar or define a default value.'.format(option)) + + value = default + + if isinstance(cast, Undefined): + return value + + if cast is bool: + value = _cast_boolean(value) + elif cast is list: + value = [x for x in value.split(',') if x] + else: + value = cast(value) + + return value diff --git a/flask/venv/lib/python3.6/site-packages/dotenv/ipython.py b/flask/venv/lib/python3.6/site-packages/dotenv/ipython.py new file mode 100644 index 0000000..06252f1 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/dotenv/ipython.py @@ -0,0 +1,41 @@ +from __future__ import print_function + +from IPython.core.magic import Magics, line_magic, magics_class +from IPython.core.magic_arguments import (argument, magic_arguments, + parse_argstring) + +from .main import find_dotenv, load_dotenv + + +@magics_class +class IPythonDotEnv(Magics): + + @magic_arguments() + @argument( + '-o', '--override', action='store_true', + help="Indicate to override existing variables" + ) + @argument( + '-v', '--verbose', action='store_true', + help="Indicate function calls to be verbose" + ) + @argument('dotenv_path', nargs='?', type=str, default='.env', + help='Search in increasingly higher folders for the `dotenv_path`') + @line_magic + def dotenv(self, line): + args = parse_argstring(self.dotenv, line) + # Locate the .env file + dotenv_path = args.dotenv_path + try: + dotenv_path = find_dotenv(dotenv_path, True, True) + except IOError: + print("cannot find .env file") + return + + # Load the .env file + load_dotenv(dotenv_path, verbose=args.verbose, override=args.override) + + +def load_ipython_extension(ipython): + """Register the %dotenv magic.""" + ipython.register_magics(IPythonDotEnv) diff --git a/flask/venv/lib/python3.6/site-packages/dotenv/main.py b/flask/venv/lib/python3.6/site-packages/dotenv/main.py new file mode 100644 index 0000000..98b22ec --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/dotenv/main.py @@ -0,0 +1,348 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import, print_function, unicode_literals + +import codecs +import io +import os +import re +import shutil +import sys +from subprocess import Popen +import tempfile +import warnings +from collections import OrderedDict, namedtuple +from contextlib import contextmanager + +from .compat import StringIO, PY2, WIN, text_type + +__posix_variable = re.compile(r'\$\{[^\}]*\}') + +_binding = re.compile( + r""" + ( + \s* # leading whitespace + (?:export{0}+)? # export + + ( '[^']+' # single-quoted key + | [^=\#\s]+ # or unquoted key + )? + + (?: + (?:{0}*={0}*) # equal sign + + ( '(?:\\'|[^'])*' # single-quoted value + | "(?:\\"|[^"])*" # or double-quoted value + | [^\#\r\n]* # or unquoted value + ) + )? + + \s* # trailing whitespace + (?:\#[^\r\n]*)? # comment + (?:\r|\n|\r\n)? 
# newline + ) + """.format(r'[^\S\r\n]'), + re.MULTILINE | re.VERBOSE, +) + +_escape_sequence = re.compile(r"\\[\\'\"abfnrtv]") + + +Binding = namedtuple('Binding', 'key value original') + + +def decode_escapes(string): + def decode_match(match): + return codecs.decode(match.group(0), 'unicode-escape') + + return _escape_sequence.sub(decode_match, string) + + +def is_surrounded_by(string, char): + return ( + len(string) > 1 + and string[0] == string[-1] == char + ) + + +def parse_binding(string, position): + match = _binding.match(string, position) + (matched, key, value) = match.groups() + if key is None or value is None: + key = None + value = None + else: + value_quoted = is_surrounded_by(value, "'") or is_surrounded_by(value, '"') + if value_quoted: + value = decode_escapes(value[1:-1]) + else: + value = value.strip() + return (Binding(key=key, value=value, original=matched), match.end()) + + +def parse_stream(stream): + string = stream.read() + position = 0 + length = len(string) + while position < length: + (binding, position) = parse_binding(string, position) + yield binding + + +class DotEnv(): + + def __init__(self, dotenv_path, verbose=False): + self.dotenv_path = dotenv_path + self._dict = None + self.verbose = verbose + + @contextmanager + def _get_stream(self): + if isinstance(self.dotenv_path, StringIO): + yield self.dotenv_path + elif os.path.isfile(self.dotenv_path): + with io.open(self.dotenv_path) as stream: + yield stream + else: + if self.verbose: + warnings.warn("File doesn't exist {}".format(self.dotenv_path)) + yield StringIO('') + + def dict(self): + """Return dotenv as dict""" + if self._dict: + return self._dict + + values = OrderedDict(self.parse()) + self._dict = resolve_nested_variables(values) + return self._dict + + def parse(self): + with self._get_stream() as stream: + for mapping in parse_stream(stream): + if mapping.key is not None and mapping.value is not None: + yield mapping.key, mapping.value + + def set_as_environment_variables(self, override=False): + """ + Load the current dotenv as system environemt variable. + """ + for k, v in self.dict().items(): + if k in os.environ and not override: + continue + # With Python2 on Windows, force environment variables to str to avoid + # "TypeError: environment can only contain strings" in Python's subprocess.py. + if PY2 and WIN: + if isinstance(k, text_type) or isinstance(v, text_type): + k = k.encode('ascii') + v = v.encode('ascii') + os.environ[k] = v + + return True + + def get(self, key): + """ + """ + data = self.dict() + + if key in data: + return data[key] + + if self.verbose: + warnings.warn("key %s not found in %s." 
% (key, self.dotenv_path)) + + +def get_key(dotenv_path, key_to_get): + """ + Gets the value of a given key from the given .env + + If the .env path given doesn't exist, fails + """ + return DotEnv(dotenv_path, verbose=True).get(key_to_get) + + +@contextmanager +def rewrite(path): + try: + with tempfile.NamedTemporaryFile(mode="w+", delete=False) as dest: + with io.open(path) as source: + yield (source, dest) + except BaseException: + if os.path.isfile(dest.name): + os.unlink(dest.name) + raise + else: + shutil.move(dest.name, path) + + +def set_key(dotenv_path, key_to_set, value_to_set, quote_mode="always"): + """ + Adds or Updates a key/value to the given .env + + If the .env path given doesn't exist, fails instead of risking creating + an orphan .env somewhere in the filesystem + """ + value_to_set = value_to_set.strip("'").strip('"') + if not os.path.exists(dotenv_path): + warnings.warn("can't write to %s - it doesn't exist." % dotenv_path) + return None, key_to_set, value_to_set + + if " " in value_to_set: + quote_mode = "always" + + line_template = '{}="{}"\n' if quote_mode == "always" else '{}={}\n' + line_out = line_template.format(key_to_set, value_to_set) + + with rewrite(dotenv_path) as (source, dest): + replaced = False + for mapping in parse_stream(source): + if mapping.key == key_to_set: + dest.write(line_out) + replaced = True + else: + dest.write(mapping.original) + if not replaced: + dest.write(line_out) + + return True, key_to_set, value_to_set + + +def unset_key(dotenv_path, key_to_unset, quote_mode="always"): + """ + Removes a given key from the given .env + + If the .env path given doesn't exist, fails + If the given key doesn't exist in the .env, fails + """ + if not os.path.exists(dotenv_path): + warnings.warn("can't delete from %s - it doesn't exist." % dotenv_path) + return None, key_to_unset + + removed = False + with rewrite(dotenv_path) as (source, dest): + for mapping in parse_stream(source): + if mapping.key == key_to_unset: + removed = True + else: + dest.write(mapping.original) + + if not removed: + warnings.warn("key %s not removed from %s - key doesn't exist." % (key_to_unset, dotenv_path)) + return None, key_to_unset + + return removed, key_to_unset + + +def resolve_nested_variables(values): + def _replacement(name): + """ + get appropriate value for a variable name. 
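Taken together, the editing helpers above are typically used like this; a rough sketch assuming a writable current directory (the key name and value are invented):

    import os
    from dotenv import get_key, set_key, unset_key

    path = os.path.join(os.getcwd(), '.env')
    open(path, 'a').close()                        # set_key refuses to create the file itself

    set_key(path, 'API_TOKEN', 'secret value')     # stored as API_TOKEN="secret value"
    print(get_key(path, 'API_TOKEN'))              # -> secret value
    unset_key(path, 'API_TOKEN')                   # removes the binding again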
+ first search in environ, if not found, + then look into the dotenv variables + """ + ret = os.getenv(name, new_values.get(name, "")) + return ret + + def _re_sub_callback(match_object): + """ + From a match object gets the variable name and returns + the correct replacement + """ + return _replacement(match_object.group()[2:-1]) + + new_values = {} + + for k, v in values.items(): + new_values[k] = __posix_variable.sub(_re_sub_callback, v) + + return new_values + + +def _walk_to_root(path): + """ + Yield directories starting from the given directory up to the root + """ + if not os.path.exists(path): + raise IOError('Starting path not found') + + if os.path.isfile(path): + path = os.path.dirname(path) + + last_dir = None + current_dir = os.path.abspath(path) + while last_dir != current_dir: + yield current_dir + parent_dir = os.path.abspath(os.path.join(current_dir, os.path.pardir)) + last_dir, current_dir = current_dir, parent_dir + + +def find_dotenv(filename='.env', raise_error_if_not_found=False, usecwd=False): + """ + Search in increasingly higher folders for the given file + + Returns path to the file if found, or an empty string otherwise + """ + if usecwd or '__file__' not in globals(): + # should work without __file__, e.g. in REPL or IPython notebook + path = os.getcwd() + else: + # will work for .py files + frame = sys._getframe() + # find first frame that is outside of this file + while frame.f_code.co_filename == __file__: + frame = frame.f_back + frame_filename = frame.f_code.co_filename + path = os.path.dirname(os.path.abspath(frame_filename)) + + for dirname in _walk_to_root(path): + check_path = os.path.join(dirname, filename) + if os.path.isfile(check_path): + return check_path + + if raise_error_if_not_found: + raise IOError('File not found') + + return '' + + +def load_dotenv(dotenv_path=None, stream=None, verbose=False, override=False): + f = dotenv_path or stream or find_dotenv() + return DotEnv(f, verbose=verbose).set_as_environment_variables(override=override) + + +def dotenv_values(dotenv_path=None, stream=None, verbose=False): + f = dotenv_path or stream or find_dotenv() + return DotEnv(f, verbose=verbose).dict() + + +def run_command(command, env): + """Run command in sub process. + + Runs the command in a sub process with the variables from `env` + added in the current environment variables. 
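A short usage sketch for the lookup and loading helpers above (the variable name is invented):

    import os
    from dotenv import find_dotenv, load_dotenv, dotenv_values

    dotenv_path = find_dotenv()            # walk upwards until a .env file is found

    load_dotenv(dotenv_path)               # export bindings; existing os.environ entries
                                           # win unless override=True is passed

    config = dotenv_values(dotenv_path)    # or read them into an OrderedDict only
    print(config.get('DATABASE_URL'), os.environ.get('DATABASE_URL'))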
+ + Parameters + ---------- + command: List[str] + The command and it's parameters + env: Dict + The additional environment variables + + Returns + ------- + int + The return code of the command + + """ + # copy the current environment variables and add the vales from + # `env` + cmd_env = os.environ.copy() + cmd_env.update(env) + + p = Popen(command, + universal_newlines=True, + bufsize=0, + shell=False, + env=cmd_env) + _, _ = p.communicate() + + return p.returncode diff --git a/flask/venv/lib/python3.6/site-packages/dotenv/version.py b/flask/venv/lib/python3.6/site-packages/dotenv/version.py new file mode 100644 index 0000000..1f4c4d4 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/dotenv/version.py @@ -0,0 +1 @@ +__version__ = "0.10.1" diff --git a/flask/venv/lib/python3.6/site-packages/easy_install.py b/flask/venv/lib/python3.6/site-packages/easy_install.py new file mode 100644 index 0000000..d87e984 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/easy_install.py @@ -0,0 +1,5 @@ +"""Run the EasyInstall command""" + +if __name__ == '__main__': + from setuptools.command.easy_install import main + main() diff --git a/flask/venv/lib/python3.6/site-packages/flask/__init__.py b/flask/venv/lib/python3.6/site-packages/flask/__init__.py new file mode 100644 index 0000000..ded1982 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/flask/__init__.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +""" + flask + ~~~~~ + + A microframework based on Werkzeug. It's extensively documented + and follows best practice patterns. + + :copyright: © 2010 by the Pallets team. + :license: BSD, see LICENSE for more details. +""" + +__version__ = '1.0.2' + +# utilities we import from Werkzeug and Jinja2 that are unused +# in the module but are exported as public interface. +from werkzeug.exceptions import abort +from werkzeug.utils import redirect +from jinja2 import Markup, escape + +from .app import Flask, Request, Response +from .config import Config +from .helpers import url_for, flash, send_file, send_from_directory, \ + get_flashed_messages, get_template_attribute, make_response, safe_join, \ + stream_with_context +from .globals import current_app, g, request, session, _request_ctx_stack, \ + _app_ctx_stack +from .ctx import has_request_context, has_app_context, \ + after_this_request, copy_current_request_context +from .blueprints import Blueprint +from .templating import render_template, render_template_string + +# the signals +from .signals import signals_available, template_rendered, request_started, \ + request_finished, got_request_exception, request_tearing_down, \ + appcontext_tearing_down, appcontext_pushed, \ + appcontext_popped, message_flashed, before_render_template + +# We're not exposing the actual json module but a convenient wrapper around +# it. +from . import json + +# This was the only thing that Flask used to export at one point and it had +# a more generic name. +jsonify = json.jsonify + +# backwards compat, goes away in 1.0 +from .sessions import SecureCookieSession as Session +json_available = True diff --git a/flask/venv/lib/python3.6/site-packages/flask/__main__.py b/flask/venv/lib/python3.6/site-packages/flask/__main__.py new file mode 100644 index 0000000..4aee654 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/flask/__main__.py @@ -0,0 +1,14 @@ +# -*- coding: utf-8 -*- +""" + flask.__main__ + ~~~~~~~~~~~~~~ + + Alias for flask.run for the command line. + + :copyright: © 2010 by the Pallets team. + :license: BSD, see LICENSE for more details. 
+""" + +if __name__ == '__main__': + from .cli import main + main(as_module=True) diff --git a/flask/venv/lib/python3.6/site-packages/flask/__pycache__/__init__.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/flask/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..eba1acb Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/flask/__pycache__/__init__.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/flask/__pycache__/__main__.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/flask/__pycache__/__main__.cpython-36.pyc new file mode 100644 index 0000000..37c0e7d Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/flask/__pycache__/__main__.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/flask/__pycache__/_compat.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/flask/__pycache__/_compat.cpython-36.pyc new file mode 100644 index 0000000..765a3ed Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/flask/__pycache__/_compat.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/flask/__pycache__/app.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/flask/__pycache__/app.cpython-36.pyc new file mode 100644 index 0000000..7a3df81 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/flask/__pycache__/app.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/flask/__pycache__/blueprints.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/flask/__pycache__/blueprints.cpython-36.pyc new file mode 100644 index 0000000..3a69efc Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/flask/__pycache__/blueprints.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/flask/__pycache__/cli.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/flask/__pycache__/cli.cpython-36.pyc new file mode 100644 index 0000000..ada61f8 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/flask/__pycache__/cli.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/flask/__pycache__/config.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/flask/__pycache__/config.cpython-36.pyc new file mode 100644 index 0000000..b8dc64a Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/flask/__pycache__/config.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/flask/__pycache__/ctx.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/flask/__pycache__/ctx.cpython-36.pyc new file mode 100644 index 0000000..1e6eb60 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/flask/__pycache__/ctx.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/flask/__pycache__/debughelpers.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/flask/__pycache__/debughelpers.cpython-36.pyc new file mode 100644 index 0000000..ebefbc1 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/flask/__pycache__/debughelpers.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/flask/__pycache__/globals.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/flask/__pycache__/globals.cpython-36.pyc new file mode 100644 index 0000000..c1d86f6 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/flask/__pycache__/globals.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/flask/__pycache__/helpers.cpython-36.pyc 
b/flask/venv/lib/python3.6/site-packages/flask/__pycache__/helpers.cpython-36.pyc new file mode 100644 index 0000000..5c2d448 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/flask/__pycache__/helpers.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/flask/__pycache__/logging.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/flask/__pycache__/logging.cpython-36.pyc new file mode 100644 index 0000000..a6c843d Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/flask/__pycache__/logging.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/flask/__pycache__/sessions.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/flask/__pycache__/sessions.cpython-36.pyc new file mode 100644 index 0000000..96138ab Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/flask/__pycache__/sessions.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/flask/__pycache__/signals.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/flask/__pycache__/signals.cpython-36.pyc new file mode 100644 index 0000000..45e9602 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/flask/__pycache__/signals.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/flask/__pycache__/templating.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/flask/__pycache__/templating.cpython-36.pyc new file mode 100644 index 0000000..6e92cbc Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/flask/__pycache__/templating.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/flask/__pycache__/testing.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/flask/__pycache__/testing.cpython-36.pyc new file mode 100644 index 0000000..02a1515 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/flask/__pycache__/testing.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/flask/__pycache__/views.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/flask/__pycache__/views.cpython-36.pyc new file mode 100644 index 0000000..a30f5df Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/flask/__pycache__/views.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/flask/__pycache__/wrappers.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/flask/__pycache__/wrappers.cpython-36.pyc new file mode 100644 index 0000000..641cd86 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/flask/__pycache__/wrappers.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/flask/_compat.py b/flask/venv/lib/python3.6/site-packages/flask/_compat.py new file mode 100644 index 0000000..a3b5b9c --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/flask/_compat.py @@ -0,0 +1,99 @@ +# -*- coding: utf-8 -*- +""" + flask._compat + ~~~~~~~~~~~~~ + + Some py2/py3 compatibility support based on a stripped down + version of six so we don't have to depend on a specific version + of it. + + :copyright: © 2010 by the Pallets team. + :license: BSD, see LICENSE for more details. 
+""" + +import sys + +PY2 = sys.version_info[0] == 2 +_identity = lambda x: x + + +if not PY2: + text_type = str + string_types = (str,) + integer_types = (int,) + + iterkeys = lambda d: iter(d.keys()) + itervalues = lambda d: iter(d.values()) + iteritems = lambda d: iter(d.items()) + + from inspect import getfullargspec as getargspec + from io import StringIO + + def reraise(tp, value, tb=None): + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + + implements_to_string = _identity + +else: + text_type = unicode + string_types = (str, unicode) + integer_types = (int, long) + + iterkeys = lambda d: d.iterkeys() + itervalues = lambda d: d.itervalues() + iteritems = lambda d: d.iteritems() + + from inspect import getargspec + from cStringIO import StringIO + + exec('def reraise(tp, value, tb=None):\n raise tp, value, tb') + + def implements_to_string(cls): + cls.__unicode__ = cls.__str__ + cls.__str__ = lambda x: x.__unicode__().encode('utf-8') + return cls + + +def with_metaclass(meta, *bases): + """Create a base class with a metaclass.""" + # This requires a bit of explanation: the basic idea is to make a + # dummy metaclass for one level of class instantiation that replaces + # itself with the actual metaclass. + class metaclass(type): + def __new__(cls, name, this_bases, d): + return meta(name, bases, d) + return type.__new__(metaclass, 'temporary_class', (), {}) + + +# Certain versions of pypy have a bug where clearing the exception stack +# breaks the __exit__ function in a very peculiar way. The second level of +# exception blocks is necessary because pypy seems to forget to check if an +# exception happened until the next bytecode instruction? +# +# Relevant PyPy bugfix commit: +# https://bitbucket.org/pypy/pypy/commits/77ecf91c635a287e88e60d8ddb0f4e9df4003301 +# According to ronan on #pypy IRC, it is released in PyPy2 2.3 and later +# versions. +# +# Ubuntu 14.04 has PyPy 2.2.1, which does exhibit this bug. +BROKEN_PYPY_CTXMGR_EXIT = False +if hasattr(sys, 'pypy_version_info'): + class _Mgr(object): + def __enter__(self): + return self + def __exit__(self, *args): + if hasattr(sys, 'exc_clear'): + # Python 3 (PyPy3) doesn't have exc_clear + sys.exc_clear() + try: + try: + with _Mgr(): + raise AssertionError() + except: + raise + except TypeError: + BROKEN_PYPY_CTXMGR_EXIT = True + except AssertionError: + pass diff --git a/flask/venv/lib/python3.6/site-packages/flask/app.py b/flask/venv/lib/python3.6/site-packages/flask/app.py new file mode 100644 index 0000000..87c5900 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/flask/app.py @@ -0,0 +1,2315 @@ +# -*- coding: utf-8 -*- +""" + flask.app + ~~~~~~~~~ + + This module implements the central WSGI application object. + + :copyright: © 2010 by the Pallets team. + :license: BSD, see LICENSE for more details. +""" + +import os +import sys +import warnings +from datetime import timedelta +from functools import update_wrapper +from itertools import chain +from threading import Lock + +from werkzeug.datastructures import Headers, ImmutableDict +from werkzeug.exceptions import BadRequest, BadRequestKeyError, HTTPException, \ + InternalServerError, MethodNotAllowed, default_exceptions +from werkzeug.routing import BuildError, Map, RequestRedirect, Rule + +from . 
import cli, json +from ._compat import integer_types, reraise, string_types, text_type +from .config import Config, ConfigAttribute +from .ctx import AppContext, RequestContext, _AppCtxGlobals +from .globals import _request_ctx_stack, g, request, session +from .helpers import ( + _PackageBoundObject, + _endpoint_from_view_func, find_package, get_env, get_debug_flag, + get_flashed_messages, locked_cached_property, url_for, get_load_dotenv +) +from .logging import create_logger +from .sessions import SecureCookieSessionInterface +from .signals import appcontext_tearing_down, got_request_exception, \ + request_finished, request_started, request_tearing_down +from .templating import DispatchingJinjaLoader, Environment, \ + _default_template_ctx_processor +from .wrappers import Request, Response + +# a singleton sentinel value for parameter defaults +_sentinel = object() + + +def _make_timedelta(value): + if not isinstance(value, timedelta): + return timedelta(seconds=value) + return value + + +def setupmethod(f): + """Wraps a method so that it performs a check in debug mode if the + first request was already handled. + """ + def wrapper_func(self, *args, **kwargs): + if self.debug and self._got_first_request: + raise AssertionError('A setup function was called after the ' + 'first request was handled. This usually indicates a bug ' + 'in the application where a module was not imported ' + 'and decorators or other functionality was called too late.\n' + 'To fix this make sure to import all your view modules, ' + 'database models and everything related at a central place ' + 'before the application starts serving requests.') + return f(self, *args, **kwargs) + return update_wrapper(wrapper_func, f) + + +class Flask(_PackageBoundObject): + """The flask object implements a WSGI application and acts as the central + object. It is passed the name of the module or package of the + application. Once it is created it will act as a central registry for + the view functions, the URL rules, template configuration and much more. + + The name of the package is used to resolve resources from inside the + package or the folder the module is contained in depending on if the + package parameter resolves to an actual python package (a folder with + an :file:`__init__.py` file inside) or a standard module (just a ``.py`` file). + + For more information about resource loading, see :func:`open_resource`. + + Usually you create a :class:`Flask` instance in your main module or + in the :file:`__init__.py` file of your package like this:: + + from flask import Flask + app = Flask(__name__) + + .. admonition:: About the First Parameter + + The idea of the first parameter is to give Flask an idea of what + belongs to your application. This name is used to find resources + on the filesystem, can be used by extensions to improve debugging + information and a lot more. + + So it's important what you provide there. If you are using a single + module, `__name__` is always the correct value. If you however are + using a package, it's usually recommended to hardcode the name of + your package there. + + For example if your application is defined in :file:`yourapplication/app.py` + you should create it with one of the two versions below:: + + app = Flask('yourapplication') + app = Flask(__name__.split('.')[0]) + + Why is that? The application will work even with `__name__`, thanks + to how resources are looked up. However it will make debugging more + painful. 
Certain extensions can make assumptions based on the + import name of your application. For example the Flask-SQLAlchemy + extension will look for the code in your application that triggered + an SQL query in debug mode. If the import name is not properly set + up, that debugging information is lost. (For example it would only + pick up SQL queries in `yourapplication.app` and not + `yourapplication.views.frontend`) + + .. versionadded:: 0.7 + The `static_url_path`, `static_folder`, and `template_folder` + parameters were added. + + .. versionadded:: 0.8 + The `instance_path` and `instance_relative_config` parameters were + added. + + .. versionadded:: 0.11 + The `root_path` parameter was added. + + .. versionadded:: 1.0 + The ``host_matching`` and ``static_host`` parameters were added. + + .. versionadded:: 1.0 + The ``subdomain_matching`` parameter was added. Subdomain + matching needs to be enabled manually now. Setting + :data:`SERVER_NAME` does not implicitly enable it. + + :param import_name: the name of the application package + :param static_url_path: can be used to specify a different path for the + static files on the web. Defaults to the name + of the `static_folder` folder. + :param static_folder: the folder with static files that should be served + at `static_url_path`. Defaults to the ``'static'`` + folder in the root path of the application. + :param static_host: the host to use when adding the static route. + Defaults to None. Required when using ``host_matching=True`` + with a ``static_folder`` configured. + :param host_matching: set ``url_map.host_matching`` attribute. + Defaults to False. + :param subdomain_matching: consider the subdomain relative to + :data:`SERVER_NAME` when matching routes. Defaults to False. + :param template_folder: the folder that contains the templates that should + be used by the application. Defaults to + ``'templates'`` folder in the root path of the + application. + :param instance_path: An alternative instance path for the application. + By default the folder ``'instance'`` next to the + package or module is assumed to be the instance + path. + :param instance_relative_config: if set to ``True`` relative filenames + for loading the config are assumed to + be relative to the instance path instead + of the application root. + :param root_path: Flask by default will automatically calculate the path + to the root of the application. In certain situations + this cannot be achieved (for instance if the package + is a Python 3 namespace package) and needs to be + manually defined. + """ + + #: The class that is used for request objects. See :class:`~flask.Request` + #: for more information. + request_class = Request + + #: The class that is used for response objects. See + #: :class:`~flask.Response` for more information. + response_class = Response + + #: The class that is used for the Jinja environment. + #: + #: .. versionadded:: 0.11 + jinja_environment = Environment + + #: The class that is used for the :data:`~flask.g` instance. + #: + #: Example use cases for a custom class: + #: + #: 1. Store arbitrary attributes on flask.g. + #: 2. Add a property for lazy per-request database connectors. + #: 3. Return None instead of AttributeError on unexpected attributes. + #: 4. Raise exception if an unexpected attr is set, a "controlled" flask.g. + #: + #: In Flask 0.9 this property was called `request_globals_class` but it + #: was changed in 0.10 to :attr:`app_ctx_globals_class` because the + #: flask.g object is now application context scoped. 
+ #: + #: .. versionadded:: 0.10 + app_ctx_globals_class = _AppCtxGlobals + + #: The class that is used for the ``config`` attribute of this app. + #: Defaults to :class:`~flask.Config`. + #: + #: Example use cases for a custom class: + #: + #: 1. Default values for certain config options. + #: 2. Access to config values through attributes in addition to keys. + #: + #: .. versionadded:: 0.11 + config_class = Config + + #: The testing flag. Set this to ``True`` to enable the test mode of + #: Flask extensions (and in the future probably also Flask itself). + #: For example this might activate test helpers that have an + #: additional runtime cost which should not be enabled by default. + #: + #: If this is enabled and PROPAGATE_EXCEPTIONS is not changed from the + #: default it's implicitly enabled. + #: + #: This attribute can also be configured from the config with the + #: ``TESTING`` configuration key. Defaults to ``False``. + testing = ConfigAttribute('TESTING') + + #: If a secret key is set, cryptographic components can use this to + #: sign cookies and other things. Set this to a complex random value + #: when you want to use the secure cookie for instance. + #: + #: This attribute can also be configured from the config with the + #: :data:`SECRET_KEY` configuration key. Defaults to ``None``. + secret_key = ConfigAttribute('SECRET_KEY') + + #: The secure cookie uses this for the name of the session cookie. + #: + #: This attribute can also be configured from the config with the + #: ``SESSION_COOKIE_NAME`` configuration key. Defaults to ``'session'`` + session_cookie_name = ConfigAttribute('SESSION_COOKIE_NAME') + + #: A :class:`~datetime.timedelta` which is used to set the expiration + #: date of a permanent session. The default is 31 days which makes a + #: permanent session survive for roughly one month. + #: + #: This attribute can also be configured from the config with the + #: ``PERMANENT_SESSION_LIFETIME`` configuration key. Defaults to + #: ``timedelta(days=31)`` + permanent_session_lifetime = ConfigAttribute('PERMANENT_SESSION_LIFETIME', + get_converter=_make_timedelta) + + #: A :class:`~datetime.timedelta` which is used as default cache_timeout + #: for the :func:`send_file` functions. The default is 12 hours. + #: + #: This attribute can also be configured from the config with the + #: ``SEND_FILE_MAX_AGE_DEFAULT`` configuration key. This configuration + #: variable can also be set with an integer value used as seconds. + #: Defaults to ``timedelta(hours=12)`` + send_file_max_age_default = ConfigAttribute('SEND_FILE_MAX_AGE_DEFAULT', + get_converter=_make_timedelta) + + #: Enable this if you want to use the X-Sendfile feature. Keep in + #: mind that the server has to support this. This only affects files + #: sent with the :func:`send_file` method. + #: + #: .. versionadded:: 0.2 + #: + #: This attribute can also be configured from the config with the + #: ``USE_X_SENDFILE`` configuration key. Defaults to ``False``. + use_x_sendfile = ConfigAttribute('USE_X_SENDFILE') + + #: The JSON encoder class to use. Defaults to :class:`~flask.json.JSONEncoder`. + #: + #: .. versionadded:: 0.10 + json_encoder = json.JSONEncoder + + #: The JSON decoder class to use. Defaults to :class:`~flask.json.JSONDecoder`. + #: + #: .. versionadded:: 0.10 + json_decoder = json.JSONDecoder + + #: Options that are passed directly to the Jinja2 environment. + jinja_options = ImmutableDict( + extensions=['jinja2.ext.autoescape', 'jinja2.ext.with_'] + ) + + #: Default configuration parameters. 
+ default_config = ImmutableDict({ + 'ENV': None, + 'DEBUG': None, + 'TESTING': False, + 'PROPAGATE_EXCEPTIONS': None, + 'PRESERVE_CONTEXT_ON_EXCEPTION': None, + 'SECRET_KEY': None, + 'PERMANENT_SESSION_LIFETIME': timedelta(days=31), + 'USE_X_SENDFILE': False, + 'SERVER_NAME': None, + 'APPLICATION_ROOT': '/', + 'SESSION_COOKIE_NAME': 'session', + 'SESSION_COOKIE_DOMAIN': None, + 'SESSION_COOKIE_PATH': None, + 'SESSION_COOKIE_HTTPONLY': True, + 'SESSION_COOKIE_SECURE': False, + 'SESSION_COOKIE_SAMESITE': None, + 'SESSION_REFRESH_EACH_REQUEST': True, + 'MAX_CONTENT_LENGTH': None, + 'SEND_FILE_MAX_AGE_DEFAULT': timedelta(hours=12), + 'TRAP_BAD_REQUEST_ERRORS': None, + 'TRAP_HTTP_EXCEPTIONS': False, + 'EXPLAIN_TEMPLATE_LOADING': False, + 'PREFERRED_URL_SCHEME': 'http', + 'JSON_AS_ASCII': True, + 'JSON_SORT_KEYS': True, + 'JSONIFY_PRETTYPRINT_REGULAR': False, + 'JSONIFY_MIMETYPE': 'application/json', + 'TEMPLATES_AUTO_RELOAD': None, + 'MAX_COOKIE_SIZE': 4093, + }) + + #: The rule object to use for URL rules created. This is used by + #: :meth:`add_url_rule`. Defaults to :class:`werkzeug.routing.Rule`. + #: + #: .. versionadded:: 0.7 + url_rule_class = Rule + + #: the test client that is used with when `test_client` is used. + #: + #: .. versionadded:: 0.7 + test_client_class = None + + #: The :class:`~click.testing.CliRunner` subclass, by default + #: :class:`~flask.testing.FlaskCliRunner` that is used by + #: :meth:`test_cli_runner`. Its ``__init__`` method should take a + #: Flask app object as the first argument. + #: + #: .. versionadded:: 1.0 + test_cli_runner_class = None + + #: the session interface to use. By default an instance of + #: :class:`~flask.sessions.SecureCookieSessionInterface` is used here. + #: + #: .. versionadded:: 0.8 + session_interface = SecureCookieSessionInterface() + + # TODO remove the next three attrs when Sphinx :inherited-members: works + # https://github.com/sphinx-doc/sphinx/issues/741 + + #: The name of the package or module that this app belongs to. Do not + #: change this once it is set by the constructor. + import_name = None + + #: Location of the template files to be added to the template lookup. + #: ``None`` if templates should not be added. + template_folder = None + + #: Absolute path to the package on the filesystem. Used to look up + #: resources contained in the package. + root_path = None + + def __init__( + self, + import_name, + static_url_path=None, + static_folder='static', + static_host=None, + host_matching=False, + subdomain_matching=False, + template_folder='templates', + instance_path=None, + instance_relative_config=False, + root_path=None + ): + _PackageBoundObject.__init__( + self, + import_name, + template_folder=template_folder, + root_path=root_path + ) + + if static_url_path is not None: + self.static_url_path = static_url_path + + if static_folder is not None: + self.static_folder = static_folder + + if instance_path is None: + instance_path = self.auto_find_instance_path() + elif not os.path.isabs(instance_path): + raise ValueError( + 'If an instance path is provided it must be absolute.' + ' A relative path was given instead.' + ) + + #: Holds the path to the instance folder. + #: + #: .. versionadded:: 0.8 + self.instance_path = instance_path + + #: The configuration dictionary as :class:`Config`. This behaves + #: exactly like a regular dictionary but supports additional methods + #: to load a config from files. + self.config = self.make_config(instance_relative_config) + + #: A dictionary of all view functions registered. 
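Editor's note: a quick illustration of the behaviour documented above: ``default_config`` seeds the config object, the instance path is auto-discovered as an absolute path, and a relative ``instance_path`` is rejected by the constructor::

    import os
    from flask import Flask

    app = Flask(__name__)
    assert app.config['APPLICATION_ROOT'] == '/'   # taken from default_config
    assert os.path.isabs(app.instance_path)        # auto-discovered, always absolute

    # Passing a relative path raises ValueError:
    #     Flask(__name__, instance_path='instance')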
The keys will + #: be function names which are also used to generate URLs and + #: the values are the function objects themselves. + #: To register a view function, use the :meth:`route` decorator. + self.view_functions = {} + + #: A dictionary of all registered error handlers. The key is ``None`` + #: for error handlers active on the application, otherwise the key is + #: the name of the blueprint. Each key points to another dictionary + #: where the key is the status code of the http exception. The + #: special key ``None`` points to a list of tuples where the first item + #: is the class for the instance check and the second the error handler + #: function. + #: + #: To register an error handler, use the :meth:`errorhandler` + #: decorator. + self.error_handler_spec = {} + + #: A list of functions that are called when :meth:`url_for` raises a + #: :exc:`~werkzeug.routing.BuildError`. Each function registered here + #: is called with `error`, `endpoint` and `values`. If a function + #: returns ``None`` or raises a :exc:`BuildError` the next function is + #: tried. + #: + #: .. versionadded:: 0.9 + self.url_build_error_handlers = [] + + #: A dictionary with lists of functions that will be called at the + #: beginning of each request. The key of the dictionary is the name of + #: the blueprint this function is active for, or ``None`` for all + #: requests. To register a function, use the :meth:`before_request` + #: decorator. + self.before_request_funcs = {} + + #: A list of functions that will be called at the beginning of the + #: first request to this instance. To register a function, use the + #: :meth:`before_first_request` decorator. + #: + #: .. versionadded:: 0.8 + self.before_first_request_funcs = [] + + #: A dictionary with lists of functions that should be called after + #: each request. The key of the dictionary is the name of the blueprint + #: this function is active for, ``None`` for all requests. This can for + #: example be used to close database connections. To register a function + #: here, use the :meth:`after_request` decorator. + self.after_request_funcs = {} + + #: A dictionary with lists of functions that are called after + #: each request, even if an exception has occurred. The key of the + #: dictionary is the name of the blueprint this function is active for, + #: ``None`` for all requests. These functions are not allowed to modify + #: the request, and their return values are ignored. If an exception + #: occurred while processing the request, it gets passed to each + #: teardown_request function. To register a function here, use the + #: :meth:`teardown_request` decorator. + #: + #: .. versionadded:: 0.7 + self.teardown_request_funcs = {} + + #: A list of functions that are called when the application context + #: is destroyed. Since the application context is also torn down + #: if the request ends this is the place to store code that disconnects + #: from databases. + #: + #: .. versionadded:: 0.9 + self.teardown_appcontext_funcs = [] + + #: A dictionary with lists of functions that are called before the + #: :attr:`before_request_funcs` functions. The key of the dictionary is + #: the name of the blueprint this function is active for, or ``None`` + #: for all requests. To register a function, use + #: :meth:`url_value_preprocessor`. + #: + #: .. versionadded:: 0.7 + self.url_value_preprocessors = {} + + #: A dictionary with lists of functions that can be used as URL value + #: preprocessors. 
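Editor's note: of the hooks initialised above, :attr:`url_build_error_handlers` is the least obvious; a hedged sketch of its use (the ``legacy`` endpoint and fallback URL are invented for illustration)::

    from flask import Flask

    app = Flask(__name__)

    def build_error_fallback(error, endpoint, values):
        # Returning None (or raising BuildError) hands the problem to the next
        # registered handler; a returned string is used as the built URL instead.
        if endpoint == 'legacy':                   # hypothetical endpoint name
            return 'https://legacy.example.com/'
        return None

    app.url_build_error_handlers.append(build_error_fallback)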
The key ``None`` here is used for application wide + #: callbacks, otherwise the key is the name of the blueprint. + #: Each of these functions has the chance to modify the dictionary + #: of URL values before they are used as the keyword arguments of the + #: view function. For each function registered this one should also + #: provide a :meth:`url_defaults` function that adds the parameters + #: automatically again that were removed that way. + #: + #: .. versionadded:: 0.7 + self.url_default_functions = {} + + #: A dictionary with list of functions that are called without argument + #: to populate the template context. The key of the dictionary is the + #: name of the blueprint this function is active for, ``None`` for all + #: requests. Each returns a dictionary that the template context is + #: updated with. To register a function here, use the + #: :meth:`context_processor` decorator. + self.template_context_processors = { + None: [_default_template_ctx_processor] + } + + #: A list of shell context processor functions that should be run + #: when a shell context is created. + #: + #: .. versionadded:: 0.11 + self.shell_context_processors = [] + + #: all the attached blueprints in a dictionary by name. Blueprints + #: can be attached multiple times so this dictionary does not tell + #: you how often they got attached. + #: + #: .. versionadded:: 0.7 + self.blueprints = {} + self._blueprint_order = [] + + #: a place where extensions can store application specific state. For + #: example this is where an extension could store database engines and + #: similar things. For backwards compatibility extensions should register + #: themselves like this:: + #: + #: if not hasattr(app, 'extensions'): + #: app.extensions = {} + #: app.extensions['extensionname'] = SomeObject() + #: + #: The key must match the name of the extension module. For example in + #: case of a "Flask-Foo" extension in `flask_foo`, the key would be + #: ``'foo'``. + #: + #: .. versionadded:: 0.7 + self.extensions = {} + + #: The :class:`~werkzeug.routing.Map` for this instance. You can use + #: this to change the routing converters after the class was created + #: but before any routes are connected. Example:: + #: + #: from werkzeug.routing import BaseConverter + #: + #: class ListConverter(BaseConverter): + #: def to_python(self, value): + #: return value.split(',') + #: def to_url(self, values): + #: return ','.join(super(ListConverter, self).to_url(value) + #: for value in values) + #: + #: app = Flask(__name__) + #: app.url_map.converters['list'] = ListConverter + self.url_map = Map() + + self.url_map.host_matching = host_matching + self.subdomain_matching = subdomain_matching + + # tracks internally if the application already handled at least one + # request. + self._got_first_request = False + self._before_request_lock = Lock() + + # Add a static route using the provided static_url_path, static_host, + # and static_folder if there is a configured static_folder. + # Note we do this without checking if static_folder exists. + # For one, it might be created while the server is running (e.g. during + # development). Also, Google App Engine stores static files somewhere + if self.has_static_folder: + assert bool(static_host) == host_matching, 'Invalid static_host/host_matching combination' + self.add_url_rule( + self.static_url_path + '/', + endpoint='static', + host=static_host, + view_func=self.send_static_file + ) + + #: The click command line context for this application. 
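Editor's note: the assertion in the constructor ties ``static_host`` to ``host_matching``: when host matching is enabled and a static folder is configured, a host for the static route must be supplied. A minimal sketch with a made-up hostname::

    from flask import Flask

    # host_matching=True requires static_host while a static_folder is configured
    app = Flask(
        __name__,
        host_matching=True,
        static_host='static.example.com',   # hypothetical host
    )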
Commands + #: registered here show up in the :command:`flask` command once the + #: application has been discovered. The default commands are + #: provided by Flask itself and can be overridden. + #: + #: This is an instance of a :class:`click.Group` object. + self.cli = cli.AppGroup(self.name) + + @locked_cached_property + def name(self): + """The name of the application. This is usually the import name + with the difference that it's guessed from the run file if the + import name is main. This name is used as a display name when + Flask needs the name of the application. It can be set and overridden + to change the value. + + .. versionadded:: 0.8 + """ + if self.import_name == '__main__': + fn = getattr(sys.modules['__main__'], '__file__', None) + if fn is None: + return '__main__' + return os.path.splitext(os.path.basename(fn))[0] + return self.import_name + + @property + def propagate_exceptions(self): + """Returns the value of the ``PROPAGATE_EXCEPTIONS`` configuration + value in case it's set, otherwise a sensible default is returned. + + .. versionadded:: 0.7 + """ + rv = self.config['PROPAGATE_EXCEPTIONS'] + if rv is not None: + return rv + return self.testing or self.debug + + @property + def preserve_context_on_exception(self): + """Returns the value of the ``PRESERVE_CONTEXT_ON_EXCEPTION`` + configuration value in case it's set, otherwise a sensible default + is returned. + + .. versionadded:: 0.7 + """ + rv = self.config['PRESERVE_CONTEXT_ON_EXCEPTION'] + if rv is not None: + return rv + return self.debug + + @locked_cached_property + def logger(self): + """The ``'flask.app'`` logger, a standard Python + :class:`~logging.Logger`. + + In debug mode, the logger's :attr:`~logging.Logger.level` will be set + to :data:`~logging.DEBUG`. + + If there are no handlers configured, a default handler will be added. + See :ref:`logging` for more information. + + .. versionchanged:: 1.0 + Behavior was simplified. The logger is always named + ``flask.app``. The level is only set during configuration, it + doesn't check ``app.debug`` each time. Only one format is used, + not different ones depending on ``app.debug``. No handlers are + removed, and a handler is only added if no handlers are already + configured. + + .. versionadded:: 0.3 + """ + return create_logger(self) + + @locked_cached_property + def jinja_env(self): + """The Jinja2 environment used to load templates.""" + return self.create_jinja_environment() + + @property + def got_first_request(self): + """This attribute is set to ``True`` if the application started + handling the first request. + + .. versionadded:: 0.8 + """ + return self._got_first_request + + def make_config(self, instance_relative=False): + """Used to create the config attribute by the Flask constructor. + The `instance_relative` parameter is passed in from the constructor + of Flask (there named `instance_relative_config`) and indicates if + the config should be relative to the instance path or the root path + of the application. + + .. versionadded:: 0.8 + """ + root_path = self.root_path + if instance_relative: + root_path = self.instance_path + defaults = dict(self.default_config) + defaults['ENV'] = get_env() + defaults['DEBUG'] = get_debug_flag() + return self.config_class(root_path, defaults) + + def auto_find_instance_path(self): + """Tries to locate the instance path if it was not provided to the + constructor of the application class. It will basically calculate + the path to a folder named ``instance`` next to your main file or + the package. + + .. 
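Editor's note: the :attr:`logger` property above returns the standard ``'flask.app'`` logger, so the usual :mod:`logging` API applies directly::

    import logging
    from flask import Flask

    app = Flask(__name__)
    app.logger.setLevel(logging.INFO)
    app.logger.info('application %s initialised', app.name)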
versionadded:: 0.8 + """ + prefix, package_path = find_package(self.import_name) + if prefix is None: + return os.path.join(package_path, 'instance') + return os.path.join(prefix, 'var', self.name + '-instance') + + def open_instance_resource(self, resource, mode='rb'): + """Opens a resource from the application's instance folder + (:attr:`instance_path`). Otherwise works like + :meth:`open_resource`. Instance resources can also be opened for + writing. + + :param resource: the name of the resource. To access resources within + subfolders use forward slashes as separator. + :param mode: resource file opening mode, default is 'rb'. + """ + return open(os.path.join(self.instance_path, resource), mode) + + def _get_templates_auto_reload(self): + """Reload templates when they are changed. Used by + :meth:`create_jinja_environment`. + + This attribute can be configured with :data:`TEMPLATES_AUTO_RELOAD`. If + not set, it will be enabled in debug mode. + + .. versionadded:: 1.0 + This property was added but the underlying config and behavior + already existed. + """ + rv = self.config['TEMPLATES_AUTO_RELOAD'] + return rv if rv is not None else self.debug + + def _set_templates_auto_reload(self, value): + self.config['TEMPLATES_AUTO_RELOAD'] = value + + templates_auto_reload = property( + _get_templates_auto_reload, _set_templates_auto_reload + ) + del _get_templates_auto_reload, _set_templates_auto_reload + + def create_jinja_environment(self): + """Creates the Jinja2 environment based on :attr:`jinja_options` + and :meth:`select_jinja_autoescape`. Since 0.7 this also adds + the Jinja2 globals and filters after initialization. Override + this function to customize the behavior. + + .. versionadded:: 0.5 + .. versionchanged:: 0.11 + ``Environment.auto_reload`` set in accordance with + ``TEMPLATES_AUTO_RELOAD`` configuration option. + """ + options = dict(self.jinja_options) + + if 'autoescape' not in options: + options['autoescape'] = self.select_jinja_autoescape + + if 'auto_reload' not in options: + options['auto_reload'] = self.templates_auto_reload + + rv = self.jinja_environment(self, **options) + rv.globals.update( + url_for=url_for, + get_flashed_messages=get_flashed_messages, + config=self.config, + # request, session and g are normally added with the + # context processor for efficiency reasons but for imported + # templates we also want the proxies in there. + request=request, + session=session, + g=g + ) + rv.filters['tojson'] = json.tojson_filter + return rv + + def create_global_jinja_loader(self): + """Creates the loader for the Jinja2 environment. Can be used to + override just the loader and keeping the rest unchanged. It's + discouraged to override this function. Instead one should override + the :meth:`jinja_loader` function instead. + + The global loader dispatches between the loaders of the application + and the individual blueprints. + + .. versionadded:: 0.7 + """ + return DispatchingJinjaLoader(self) + + def select_jinja_autoescape(self, filename): + """Returns ``True`` if autoescaping should be active for the given + template name. If no template name is given, returns `True`. + + .. versionadded:: 0.5 + """ + if filename is None: + return True + return filename.endswith(('.html', '.htm', '.xml', '.xhtml')) + + def update_template_context(self, context): + """Update the template context with some commonly used variables. + This injects request, session, config and g into the template + context as well as everything template context processors want + to inject. 
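Editor's note: :meth:`open_instance_resource` above resolves names against :attr:`instance_path` and, unlike :meth:`open_resource`, also supports writing. A sketch with a hypothetical file name (the instance folder itself is not created automatically)::

    import os
    from flask import Flask

    app = Flask(__name__)
    os.makedirs(app.instance_path, exist_ok=True)

    # 'notes.txt' is a made-up resource inside the instance folder
    with app.open_instance_resource('notes.txt', mode='wb') as f:
        f.write(b'instance-local data')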
Note that the as of Flask 0.6, the original values + in the context will not be overridden if a context processor + decides to return a value with the same key. + + :param context: the context as a dictionary that is updated in place + to add extra variables. + """ + funcs = self.template_context_processors[None] + reqctx = _request_ctx_stack.top + if reqctx is not None: + bp = reqctx.request.blueprint + if bp is not None and bp in self.template_context_processors: + funcs = chain(funcs, self.template_context_processors[bp]) + orig_ctx = context.copy() + for func in funcs: + context.update(func()) + # make sure the original values win. This makes it possible to + # easier add new variables in context processors without breaking + # existing views. + context.update(orig_ctx) + + def make_shell_context(self): + """Returns the shell context for an interactive shell for this + application. This runs all the registered shell context + processors. + + .. versionadded:: 0.11 + """ + rv = {'app': self, 'g': g} + for processor in self.shell_context_processors: + rv.update(processor()) + return rv + + #: What environment the app is running in. Flask and extensions may + #: enable behaviors based on the environment, such as enabling debug + #: mode. This maps to the :data:`ENV` config key. This is set by the + #: :envvar:`FLASK_ENV` environment variable and may not behave as + #: expected if set in code. + #: + #: **Do not enable development when deploying in production.** + #: + #: Default: ``'production'`` + env = ConfigAttribute('ENV') + + def _get_debug(self): + return self.config['DEBUG'] + + def _set_debug(self, value): + self.config['DEBUG'] = value + self.jinja_env.auto_reload = self.templates_auto_reload + + #: Whether debug mode is enabled. When using ``flask run`` to start + #: the development server, an interactive debugger will be shown for + #: unhandled exceptions, and the server will be reloaded when code + #: changes. This maps to the :data:`DEBUG` config key. This is + #: enabled when :attr:`env` is ``'development'`` and is overridden + #: by the ``FLASK_DEBUG`` environment variable. It may not behave as + #: expected if set in code. + #: + #: **Do not enable debug mode when deploying in production.** + #: + #: Default: ``True`` if :attr:`env` is ``'development'``, or + #: ``False`` otherwise. + debug = property(_get_debug, _set_debug) + del _get_debug, _set_debug + + def run(self, host=None, port=None, debug=None, + load_dotenv=True, **options): + """Runs the application on a local development server. + + Do not use ``run()`` in a production setting. It is not intended to + meet security and performance requirements for a production server. + Instead, see :ref:`deployment` for WSGI server recommendations. + + If the :attr:`debug` flag is set the server will automatically reload + for code changes and show a debugger in case an exception happened. + + If you want to run the application in debug mode, but disable the + code execution on the interactive debugger, you can pass + ``use_evalex=False`` as parameter. This will keep the debugger's + traceback screen active, but disable code execution. + + It is not recommended to use this function for development with + automatic reloading as this is badly supported. Instead you should + be using the :command:`flask` command line script's ``run`` support. + + .. admonition:: Keep in Mind + + Flask will suppress any server error with a generic error page + unless it is in debug mode. 
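Editor's note: a short sketch of the ``debug`` property defined above; setting it writes the ``DEBUG`` config key and, because ``TEMPLATES_AUTO_RELOAD`` defaults to ``None``, template auto-reloading follows the debug flag::

    from flask import Flask

    app = Flask(__name__)
    app.debug = True                    # equivalent to app.config['DEBUG'] = True
    assert app.config['DEBUG'] is True
    assert app.jinja_env.auto_reload    # TEMPLATES_AUTO_RELOAD falls back to debug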
As such to enable just the + interactive debugger without the code reloading, you have to + invoke :meth:`run` with ``debug=True`` and ``use_reloader=False``. + Setting ``use_debugger`` to ``True`` without being in debug mode + won't catch any exceptions because there won't be any to + catch. + + :param host: the hostname to listen on. Set this to ``'0.0.0.0'`` to + have the server available externally as well. Defaults to + ``'127.0.0.1'`` or the host in the ``SERVER_NAME`` config variable + if present. + :param port: the port of the webserver. Defaults to ``5000`` or the + port defined in the ``SERVER_NAME`` config variable if present. + :param debug: if given, enable or disable debug mode. See + :attr:`debug`. + :param load_dotenv: Load the nearest :file:`.env` and :file:`.flaskenv` + files to set environment variables. Will also change the working + directory to the directory containing the first file found. + :param options: the options to be forwarded to the underlying Werkzeug + server. See :func:`werkzeug.serving.run_simple` for more + information. + + .. versionchanged:: 1.0 + If installed, python-dotenv will be used to load environment + variables from :file:`.env` and :file:`.flaskenv` files. + + If set, the :envvar:`FLASK_ENV` and :envvar:`FLASK_DEBUG` + environment variables will override :attr:`env` and + :attr:`debug`. + + Threaded mode is enabled by default. + + .. versionchanged:: 0.10 + The default port is now picked from the ``SERVER_NAME`` + variable. + """ + # Change this into a no-op if the server is invoked from the + # command line. Have a look at cli.py for more information. + if os.environ.get('FLASK_RUN_FROM_CLI') == 'true': + from .debughelpers import explain_ignored_app_run + explain_ignored_app_run() + return + + if get_load_dotenv(load_dotenv): + cli.load_dotenv() + + # if set, let env vars override previous values + if 'FLASK_ENV' in os.environ: + self.env = get_env() + self.debug = get_debug_flag() + elif 'FLASK_DEBUG' in os.environ: + self.debug = get_debug_flag() + + # debug passed to method overrides all other sources + if debug is not None: + self.debug = bool(debug) + + _host = '127.0.0.1' + _port = 5000 + server_name = self.config.get('SERVER_NAME') + sn_host, sn_port = None, None + + if server_name: + sn_host, _, sn_port = server_name.partition(':') + + host = host or sn_host or _host + port = int(port or sn_port or _port) + + options.setdefault('use_reloader', self.debug) + options.setdefault('use_debugger', self.debug) + options.setdefault('threaded', True) + + cli.show_server_banner(self.env, self.debug, self.name, False) + + from werkzeug.serving import run_simple + + try: + run_simple(host, port, self, **options) + finally: + # reset the first request information if the development server + # reset normally. This makes it possible to restart the server + # without reloader and that stuff from an interactive shell. + self._got_first_request = False + + def test_client(self, use_cookies=True, **kwargs): + """Creates a test client for this application. For information + about unit testing head over to :ref:`testing`. + + Note that if you are testing for assertions or exceptions in your + application code, you must set ``app.testing = True`` in order for the + exceptions to propagate to the test client. Otherwise, the exception + will be handled by the application (not visible to the test client) and + the only indication of an AssertionError or other exception will be a + 500 status code response to the test client. 
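Editor's note: a minimal use of :meth:`run` with the parameters documented above (development use only, as the docstring stresses; the port is arbitrary)::

    from flask import Flask

    app = Flask(__name__)

    @app.route('/')
    def index():
        return 'hello'

    if __name__ == '__main__':
        # threaded mode is enabled by default since 1.0
        app.run(host='0.0.0.0', port=8080, debug=True)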
See the :attr:`testing` + attribute. For example:: + + app.testing = True + client = app.test_client() + + The test client can be used in a ``with`` block to defer the closing down + of the context until the end of the ``with`` block. This is useful if + you want to access the context locals for testing:: + + with app.test_client() as c: + rv = c.get('/?vodka=42') + assert request.args['vodka'] == '42' + + Additionally, you may pass optional keyword arguments that will then + be passed to the application's :attr:`test_client_class` constructor. + For example:: + + from flask.testing import FlaskClient + + class CustomClient(FlaskClient): + def __init__(self, *args, **kwargs): + self._authentication = kwargs.pop("authentication") + super(CustomClient,self).__init__( *args, **kwargs) + + app.test_client_class = CustomClient + client = app.test_client(authentication='Basic ....') + + See :class:`~flask.testing.FlaskClient` for more information. + + .. versionchanged:: 0.4 + added support for ``with`` block usage for the client. + + .. versionadded:: 0.7 + The `use_cookies` parameter was added as well as the ability + to override the client to be used by setting the + :attr:`test_client_class` attribute. + + .. versionchanged:: 0.11 + Added `**kwargs` to support passing additional keyword arguments to + the constructor of :attr:`test_client_class`. + """ + cls = self.test_client_class + if cls is None: + from flask.testing import FlaskClient as cls + return cls(self, self.response_class, use_cookies=use_cookies, **kwargs) + + def test_cli_runner(self, **kwargs): + """Create a CLI runner for testing CLI commands. + See :ref:`testing-cli`. + + Returns an instance of :attr:`test_cli_runner_class`, by default + :class:`~flask.testing.FlaskCliRunner`. The Flask app object is + passed as the first argument. + + .. versionadded:: 1.0 + """ + cls = self.test_cli_runner_class + + if cls is None: + from flask.testing import FlaskCliRunner as cls + + return cls(self, **kwargs) + + def open_session(self, request): + """Creates or opens a new session. Default implementation stores all + session data in a signed cookie. This requires that the + :attr:`secret_key` is set. Instead of overriding this method + we recommend replacing the :class:`session_interface`. + + .. deprecated: 1.0 + Will be removed in 1.1. Use ``session_interface.open_session`` + instead. + + :param request: an instance of :attr:`request_class`. + """ + + warnings.warn(DeprecationWarning( + '"open_session" is deprecated and will be removed in 1.1. Use' + ' "session_interface.open_session" instead.' + )) + return self.session_interface.open_session(self, request) + + def save_session(self, session, response): + """Saves the session if it needs updates. For the default + implementation, check :meth:`open_session`. Instead of overriding this + method we recommend replacing the :class:`session_interface`. + + .. deprecated: 1.0 + Will be removed in 1.1. Use ``session_interface.save_session`` + instead. + + :param session: the session to be saved (a + :class:`~werkzeug.contrib.securecookie.SecureCookie` + object) + :param response: an instance of :attr:`response_class` + """ + + warnings.warn(DeprecationWarning( + '"save_session" is deprecated and will be removed in 1.1. Use' + ' "session_interface.save_session" instead.' + )) + return self.session_interface.save_session(self, session, response) + + def make_null_session(self): + """Creates a new instance of a missing session. 
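Editor's note: a hedged sketch of :meth:`test_cli_runner` in action; the ``hello`` command is invented for the example and registered on ``app.cli``, which is where commands shown by the :command:`flask` script live::

    import click
    from flask import Flask

    app = Flask(__name__)

    @app.cli.command('hello')           # made-up command for this sketch
    def hello_command():
        click.echo('hello from the CLI')

    runner = app.test_cli_runner()
    result = runner.invoke(args=['hello'])
    assert 'hello from the CLI' in result.output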
Instead of overriding + this method we recommend replacing the :class:`session_interface`. + + .. deprecated: 1.0 + Will be removed in 1.1. Use ``session_interface.make_null_session`` + instead. + + .. versionadded:: 0.7 + """ + + warnings.warn(DeprecationWarning( + '"make_null_session" is deprecated and will be removed in 1.1. Use' + ' "session_interface.make_null_session" instead.' + )) + return self.session_interface.make_null_session(self) + + @setupmethod + def register_blueprint(self, blueprint, **options): + """Register a :class:`~flask.Blueprint` on the application. Keyword + arguments passed to this method will override the defaults set on the + blueprint. + + Calls the blueprint's :meth:`~flask.Blueprint.register` method after + recording the blueprint in the application's :attr:`blueprints`. + + :param blueprint: The blueprint to register. + :param url_prefix: Blueprint routes will be prefixed with this. + :param subdomain: Blueprint routes will match on this subdomain. + :param url_defaults: Blueprint routes will use these default values for + view arguments. + :param options: Additional keyword arguments are passed to + :class:`~flask.blueprints.BlueprintSetupState`. They can be + accessed in :meth:`~flask.Blueprint.record` callbacks. + + .. versionadded:: 0.7 + """ + first_registration = False + + if blueprint.name in self.blueprints: + assert self.blueprints[blueprint.name] is blueprint, ( + 'A name collision occurred between blueprints %r and %r. Both' + ' share the same name "%s". Blueprints that are created on the' + ' fly need unique names.' % ( + blueprint, self.blueprints[blueprint.name], blueprint.name + ) + ) + else: + self.blueprints[blueprint.name] = blueprint + self._blueprint_order.append(blueprint) + first_registration = True + + blueprint.register(self, options, first_registration) + + def iter_blueprints(self): + """Iterates over all blueprints by the order they were registered. + + .. versionadded:: 0.11 + """ + return iter(self._blueprint_order) + + @setupmethod + def add_url_rule(self, rule, endpoint=None, view_func=None, + provide_automatic_options=None, **options): + """Connects a URL rule. Works exactly like the :meth:`route` + decorator. If a view_func is provided it will be registered with the + endpoint. + + Basically this example:: + + @app.route('/') + def index(): + pass + + Is equivalent to the following:: + + def index(): + pass + app.add_url_rule('/', 'index', index) + + If the view_func is not provided you will need to connect the endpoint + to a view function like so:: + + app.view_functions['index'] = index + + Internally :meth:`route` invokes :meth:`add_url_rule` so if you want + to customize the behavior via subclassing you only need to change + this method. + + For more information refer to :ref:`url-route-registrations`. + + .. versionchanged:: 0.2 + `view_func` parameter added. + + .. versionchanged:: 0.6 + ``OPTIONS`` is added automatically as method. + + :param rule: the URL rule as string + :param endpoint: the endpoint for the registered URL rule. Flask + itself assumes the name of the view function as + endpoint + :param view_func: the function to call when serving a request to the + provided endpoint + :param provide_automatic_options: controls whether the ``OPTIONS`` + method should be added automatically. This can also be controlled + by setting the ``view_func.provide_automatic_options = False`` + before adding the rule. + :param options: the options to be forwarded to the underlying + :class:`~werkzeug.routing.Rule` object. 
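Editor's note: a minimal use of :meth:`register_blueprint` as documented above; the blueprint name and prefix are made up for the sketch::

    from flask import Blueprint, Flask

    bp = Blueprint('frontend', __name__)

    @bp.route('/hello')
    def hello():
        return 'hello from the frontend blueprint'

    app = Flask(__name__)
    # keyword arguments such as url_prefix override the defaults set on the blueprint
    app.register_blueprint(bp, url_prefix='/frontend')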
A change + to Werkzeug is handling of method options. methods + is a list of methods this rule should be limited + to (``GET``, ``POST`` etc.). By default a rule + just listens for ``GET`` (and implicitly ``HEAD``). + Starting with Flask 0.6, ``OPTIONS`` is implicitly + added and handled by the standard request handling. + """ + if endpoint is None: + endpoint = _endpoint_from_view_func(view_func) + options['endpoint'] = endpoint + methods = options.pop('methods', None) + + # if the methods are not given and the view_func object knows its + # methods we can use that instead. If neither exists, we go with + # a tuple of only ``GET`` as default. + if methods is None: + methods = getattr(view_func, 'methods', None) or ('GET',) + if isinstance(methods, string_types): + raise TypeError('Allowed methods have to be iterables of strings, ' + 'for example: @app.route(..., methods=["POST"])') + methods = set(item.upper() for item in methods) + + # Methods that should always be added + required_methods = set(getattr(view_func, 'required_methods', ())) + + # starting with Flask 0.8 the view_func object can disable and + # force-enable the automatic options handling. + if provide_automatic_options is None: + provide_automatic_options = getattr(view_func, + 'provide_automatic_options', None) + + if provide_automatic_options is None: + if 'OPTIONS' not in methods: + provide_automatic_options = True + required_methods.add('OPTIONS') + else: + provide_automatic_options = False + + # Add the required methods now. + methods |= required_methods + + rule = self.url_rule_class(rule, methods=methods, **options) + rule.provide_automatic_options = provide_automatic_options + + self.url_map.add(rule) + if view_func is not None: + old_func = self.view_functions.get(endpoint) + if old_func is not None and old_func != view_func: + raise AssertionError('View function mapping is overwriting an ' + 'existing endpoint function: %s' % endpoint) + self.view_functions[endpoint] = view_func + + def route(self, rule, **options): + """A decorator that is used to register a view function for a + given URL rule. This does the same thing as :meth:`add_url_rule` + but is intended for decorator usage:: + + @app.route('/') + def index(): + return 'Hello World' + + For more information refer to :ref:`url-route-registrations`. + + :param rule: the URL rule as string + :param endpoint: the endpoint for the registered URL rule. Flask + itself assumes the name of the view function as + endpoint + :param options: the options to be forwarded to the underlying + :class:`~werkzeug.routing.Rule` object. A change + to Werkzeug is handling of method options. methods + is a list of methods this rule should be limited + to (``GET``, ``POST`` etc.). By default a rule + just listens for ``GET`` (and implicitly ``HEAD``). + Starting with Flask 0.6, ``OPTIONS`` is implicitly + added and handled by the standard request handling. + """ + def decorator(f): + endpoint = options.pop('endpoint', None) + self.add_url_rule(rule, endpoint, f, **options) + return f + return decorator + + @setupmethod + def endpoint(self, endpoint): + """A decorator to register a function as an endpoint. 
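Editor's note: the ``methods`` handling described above in practice; registering a view via :meth:`add_url_rule` with an explicit method list (URL and endpoint names are illustrative)::

    from flask import Flask, request

    app = Flask(__name__)

    def submit():
        return 'submitted' if request.method == 'POST' else 'show the form'

    # equivalent to decorating submit() with @app.route('/submit', methods=['GET', 'POST'])
    app.add_url_rule('/submit', 'submit', submit, methods=['GET', 'POST'])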
+ Example:: + + @app.endpoint('example.endpoint') + def example(): + return "example" + + :param endpoint: the name of the endpoint + """ + def decorator(f): + self.view_functions[endpoint] = f + return f + return decorator + + @staticmethod + def _get_exc_class_and_code(exc_class_or_code): + """Ensure that we register only exceptions as handler keys""" + if isinstance(exc_class_or_code, integer_types): + exc_class = default_exceptions[exc_class_or_code] + else: + exc_class = exc_class_or_code + + assert issubclass(exc_class, Exception) + + if issubclass(exc_class, HTTPException): + return exc_class, exc_class.code + else: + return exc_class, None + + @setupmethod + def errorhandler(self, code_or_exception): + """Register a function to handle errors by code or exception class. + + A decorator that is used to register a function given an + error code. Example:: + + @app.errorhandler(404) + def page_not_found(error): + return 'This page does not exist', 404 + + You can also register handlers for arbitrary exceptions:: + + @app.errorhandler(DatabaseError) + def special_exception_handler(error): + return 'Database connection failed', 500 + + .. versionadded:: 0.7 + Use :meth:`register_error_handler` instead of modifying + :attr:`error_handler_spec` directly, for application wide error + handlers. + + .. versionadded:: 0.7 + One can now additionally also register custom exception types + that do not necessarily have to be a subclass of the + :class:`~werkzeug.exceptions.HTTPException` class. + + :param code_or_exception: the code as integer for the handler, or + an arbitrary exception + """ + def decorator(f): + self._register_error_handler(None, code_or_exception, f) + return f + return decorator + + @setupmethod + def register_error_handler(self, code_or_exception, f): + """Alternative error attach function to the :meth:`errorhandler` + decorator that is more straightforward to use for non decorator + usage. + + .. versionadded:: 0.7 + """ + self._register_error_handler(None, code_or_exception, f) + + @setupmethod + def _register_error_handler(self, key, code_or_exception, f): + """ + :type key: None|str + :type code_or_exception: int|T<=Exception + :type f: callable + """ + if isinstance(code_or_exception, HTTPException): # old broken behavior + raise ValueError( + 'Tried to register a handler for an exception instance {0!r}.' + ' Handlers can only be registered for exception classes or' + ' HTTP error codes.'.format(code_or_exception) + ) + + try: + exc_class, code = self._get_exc_class_and_code(code_or_exception) + except KeyError: + raise KeyError( + "'{0}' is not a recognized HTTP error code. Use a subclass of" + " HTTPException with that code instead.".format(code_or_exception) + ) + + handlers = self.error_handler_spec.setdefault(key, {}).setdefault(code, {}) + handlers[exc_class] = f + + @setupmethod + def template_filter(self, name=None): + """A decorator that is used to register custom template filter. + You can specify a name for the filter, otherwise the function + name will be used. Example:: + + @app.template_filter() + def reverse(s): + return s[::-1] + + :param name: the optional name of the filter, otherwise the + function name will be used. + """ + def decorator(f): + self.add_template_filter(f, name=name) + return f + return decorator + + @setupmethod + def add_template_filter(self, f, name=None): + """Register a custom template filter. Works exactly like the + :meth:`template_filter` decorator. 
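Editor's note: the non-decorator form mentioned above, :meth:`register_error_handler`, looks like this (handler body mirrors the decorator example in the docstring)::

    from flask import Flask

    app = Flask(__name__)

    def page_not_found(error):
        return 'This page does not exist', 404

    # equivalent to decorating page_not_found with @app.errorhandler(404)
    app.register_error_handler(404, page_not_found)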
+ + :param name: the optional name of the filter, otherwise the + function name will be used. + """ + self.jinja_env.filters[name or f.__name__] = f + + @setupmethod + def template_test(self, name=None): + """A decorator that is used to register custom template test. + You can specify a name for the test, otherwise the function + name will be used. Example:: + + @app.template_test() + def is_prime(n): + if n == 2: + return True + for i in range(2, int(math.ceil(math.sqrt(n))) + 1): + if n % i == 0: + return False + return True + + .. versionadded:: 0.10 + + :param name: the optional name of the test, otherwise the + function name will be used. + """ + def decorator(f): + self.add_template_test(f, name=name) + return f + return decorator + + @setupmethod + def add_template_test(self, f, name=None): + """Register a custom template test. Works exactly like the + :meth:`template_test` decorator. + + .. versionadded:: 0.10 + + :param name: the optional name of the test, otherwise the + function name will be used. + """ + self.jinja_env.tests[name or f.__name__] = f + + @setupmethod + def template_global(self, name=None): + """A decorator that is used to register a custom template global function. + You can specify a name for the global function, otherwise the function + name will be used. Example:: + + @app.template_global() + def double(n): + return 2 * n + + .. versionadded:: 0.10 + + :param name: the optional name of the global function, otherwise the + function name will be used. + """ + def decorator(f): + self.add_template_global(f, name=name) + return f + return decorator + + @setupmethod + def add_template_global(self, f, name=None): + """Register a custom template global function. Works exactly like the + :meth:`template_global` decorator. + + .. versionadded:: 0.10 + + :param name: the optional name of the global function, otherwise the + function name will be used. + """ + self.jinja_env.globals[name or f.__name__] = f + + @setupmethod + def before_request(self, f): + """Registers a function to run before each request. + + For example, this can be used to open a database connection, or to load + the logged in user from the session. + + The function will be called without any arguments. If it returns a + non-None value, the value is handled as if it was the return value from + the view, and further request handling is stopped. + """ + self.before_request_funcs.setdefault(None, []).append(f) + return f + + @setupmethod + def before_first_request(self, f): + """Registers a function to be run before the first request to this + instance of the application. + + The function will be called without any arguments and its return + value is ignored. + + .. versionadded:: 0.8 + """ + self.before_first_request_funcs.append(f) + return f + + @setupmethod + def after_request(self, f): + """Register a function to be run after each request. + + Your function must take one parameter, an instance of + :attr:`response_class` and return a new response object or the + same (see :meth:`process_response`). + + As of Flask 0.7 this function might not be executed at the end of the + request in case an unhandled exception occurred. + """ + self.after_request_funcs.setdefault(None, []).append(f) + return f + + @setupmethod + def teardown_request(self, f): + """Register a function to be run at the end of each request, + regardless of whether there was an exception or not. These functions + are executed when the request context is popped, even if not an + actual request was performed. 
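Editor's note: a small sketch of the :meth:`before_request` and :meth:`after_request` hooks documented above; the blocking condition and header are invented for illustration::

    from flask import Flask, request

    app = Flask(__name__)

    @app.before_request
    def block_unwanted_clients():
        # returning a non-None value stops normal dispatching
        if request.headers.get('X-Blocked'):        # made-up condition
            return 'forbidden', 403

    @app.after_request
    def add_header(response):
        # must return a response object
        response.headers['X-Example'] = 'demo'
        return response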
+ + Example:: + + ctx = app.test_request_context() + ctx.push() + ... + ctx.pop() + + When ``ctx.pop()`` is executed in the above example, the teardown + functions are called just before the request context moves from the + stack of active contexts. This becomes relevant if you are using + such constructs in tests. + + Generally teardown functions must take every necessary step to avoid + that they will fail. If they do execute code that might fail they + will have to surround the execution of these code by try/except + statements and log occurring errors. + + When a teardown function was called because of an exception it will + be passed an error object. + + The return values of teardown functions are ignored. + + .. admonition:: Debug Note + + In debug mode Flask will not tear down a request on an exception + immediately. Instead it will keep it alive so that the interactive + debugger can still access it. This behavior can be controlled + by the ``PRESERVE_CONTEXT_ON_EXCEPTION`` configuration variable. + """ + self.teardown_request_funcs.setdefault(None, []).append(f) + return f + + @setupmethod + def teardown_appcontext(self, f): + """Registers a function to be called when the application context + ends. These functions are typically also called when the request + context is popped. + + Example:: + + ctx = app.app_context() + ctx.push() + ... + ctx.pop() + + When ``ctx.pop()`` is executed in the above example, the teardown + functions are called just before the app context moves from the + stack of active contexts. This becomes relevant if you are using + such constructs in tests. + + Since a request context typically also manages an application + context it would also be called when you pop a request context. + + When a teardown function was called because of an unhandled exception + it will be passed an error object. If an :meth:`errorhandler` is + registered, it will handle the exception and the teardown will not + receive it. + + The return values of teardown functions are ignored. + + .. versionadded:: 0.9 + """ + self.teardown_appcontext_funcs.append(f) + return f + + @setupmethod + def context_processor(self, f): + """Registers a template context processor function.""" + self.template_context_processors[None].append(f) + return f + + @setupmethod + def shell_context_processor(self, f): + """Registers a shell context processor function. + + .. versionadded:: 0.11 + """ + self.shell_context_processors.append(f) + return f + + @setupmethod + def url_value_preprocessor(self, f): + """Register a URL value preprocessor function for all view + functions in the application. These functions will be called before the + :meth:`before_request` functions. + + The function can modify the values captured from the matched url before + they are passed to the view. For example, this can be used to pop a + common language code value and place it in ``g`` rather than pass it to + every view. + + The function is passed the endpoint name and values dict. The return + value is ignored. + """ + self.url_value_preprocessors.setdefault(None, []).append(f) + return f + + @setupmethod + def url_defaults(self, f): + """Callback function for URL defaults for all view functions of the + application. It's called with the endpoint and values and should + update the values passed in place. 
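Editor's note: a simplified sketch of :meth:`url_value_preprocessor` paired with :meth:`url_defaults`, as the docstrings above suggest; the ``lang_code`` value is a hypothetical URL parameter::

    from flask import Flask, g

    app = Flask(__name__)

    @app.url_value_preprocessor
    def pull_lang_code(endpoint, values):
        # pop a common URL value so views do not need it as an argument
        if values is not None:
            g.lang_code = values.pop('lang_code', None)

    @app.url_defaults
    def add_lang_code(endpoint, values):
        # put the value back automatically when building URLs
        if 'lang_code' not in values and getattr(g, 'lang_code', None):
            values['lang_code'] = g.lang_code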
+ """ + self.url_default_functions.setdefault(None, []).append(f) + return f + + def _find_error_handler(self, e): + """Return a registered error handler for an exception in this order: + blueprint handler for a specific code, app handler for a specific code, + blueprint handler for an exception class, app handler for an exception + class, or ``None`` if a suitable handler is not found. + """ + exc_class, code = self._get_exc_class_and_code(type(e)) + + for name, c in ( + (request.blueprint, code), (None, code), + (request.blueprint, None), (None, None) + ): + handler_map = self.error_handler_spec.setdefault(name, {}).get(c) + + if not handler_map: + continue + + for cls in exc_class.__mro__: + handler = handler_map.get(cls) + + if handler is not None: + return handler + + def handle_http_exception(self, e): + """Handles an HTTP exception. By default this will invoke the + registered error handlers and fall back to returning the + exception as response. + + .. versionadded:: 0.3 + """ + # Proxy exceptions don't have error codes. We want to always return + # those unchanged as errors + if e.code is None: + return e + + handler = self._find_error_handler(e) + if handler is None: + return e + return handler(e) + + def trap_http_exception(self, e): + """Checks if an HTTP exception should be trapped or not. By default + this will return ``False`` for all exceptions except for a bad request + key error if ``TRAP_BAD_REQUEST_ERRORS`` is set to ``True``. It + also returns ``True`` if ``TRAP_HTTP_EXCEPTIONS`` is set to ``True``. + + This is called for all HTTP exceptions raised by a view function. + If it returns ``True`` for any exception the error handler for this + exception is not called and it shows up as regular exception in the + traceback. This is helpful for debugging implicitly raised HTTP + exceptions. + + .. versionchanged:: 1.0 + Bad request errors are not trapped by default in debug mode. + + .. versionadded:: 0.8 + """ + if self.config['TRAP_HTTP_EXCEPTIONS']: + return True + + trap_bad_request = self.config['TRAP_BAD_REQUEST_ERRORS'] + + # if unset, trap key errors in debug mode + if ( + trap_bad_request is None and self.debug + and isinstance(e, BadRequestKeyError) + ): + return True + + if trap_bad_request: + return isinstance(e, BadRequest) + + return False + + def handle_user_exception(self, e): + """This method is called whenever an exception occurs that should be + handled. A special case are + :class:`~werkzeug.exception.HTTPException`\s which are forwarded by + this function to the :meth:`handle_http_exception` method. This + function will either return a response value or reraise the + exception with the same traceback. + + .. versionchanged:: 1.0 + Key errors raised from request data like ``form`` show the the bad + key in debug mode rather than a generic bad request message. + + .. versionadded:: 0.7 + """ + exc_type, exc_value, tb = sys.exc_info() + assert exc_value is e + # ensure not to trash sys.exc_info() at that point in case someone + # wants the traceback preserved in handle_http_exception. Of course + # we cannot prevent users from trashing it themselves in a custom + # trap_http_exception method so that's their fault then. + + # MultiDict passes the key to the exception, but that's ignored + # when generating the response message. Set an informative + # description for key errors in debug mode or when trapping errors. 
+ if ( + (self.debug or self.config['TRAP_BAD_REQUEST_ERRORS']) + and isinstance(e, BadRequestKeyError) + # only set it if it's still the default description + and e.description is BadRequestKeyError.description + ): + e.description = "KeyError: '{0}'".format(*e.args) + + if isinstance(e, HTTPException) and not self.trap_http_exception(e): + return self.handle_http_exception(e) + + handler = self._find_error_handler(e) + + if handler is None: + reraise(exc_type, exc_value, tb) + return handler(e) + + def handle_exception(self, e): + """Default exception handling that kicks in when an exception + occurs that is not caught. In debug mode the exception will + be re-raised immediately, otherwise it is logged and the handler + for a 500 internal server error is used. If no such handler + exists, a default 500 internal server error message is displayed. + + .. versionadded:: 0.3 + """ + exc_type, exc_value, tb = sys.exc_info() + + got_request_exception.send(self, exception=e) + handler = self._find_error_handler(InternalServerError()) + + if self.propagate_exceptions: + # if we want to repropagate the exception, we can attempt to + # raise it with the whole traceback in case we can do that + # (the function was actually called from the except part) + # otherwise, we just raise the error again + if exc_value is e: + reraise(exc_type, exc_value, tb) + else: + raise e + + self.log_exception((exc_type, exc_value, tb)) + if handler is None: + return InternalServerError() + return self.finalize_request(handler(e), from_error_handler=True) + + def log_exception(self, exc_info): + """Logs an exception. This is called by :meth:`handle_exception` + if debugging is disabled and right before the handler is called. + The default implementation logs the exception as error on the + :attr:`logger`. + + .. versionadded:: 0.8 + """ + self.logger.error('Exception on %s [%s]' % ( + request.path, + request.method + ), exc_info=exc_info) + + def raise_routing_exception(self, request): + """Exceptions that are recording during routing are reraised with + this method. During debug we are not reraising redirect requests + for non ``GET``, ``HEAD``, or ``OPTIONS`` requests and we're raising + a different error instead to help debug situations. + + :internal: + """ + if not self.debug \ + or not isinstance(request.routing_exception, RequestRedirect) \ + or request.method in ('GET', 'HEAD', 'OPTIONS'): + raise request.routing_exception + + from .debughelpers import FormDataRoutingRedirect + raise FormDataRoutingRedirect(request) + + def dispatch_request(self): + """Does the request dispatching. Matches the URL and returns the + return value of the view or error handler. This does not have to + be a response object. In order to convert the return value to a + proper response object, call :func:`make_response`. + + .. versionchanged:: 0.7 + This no longer does the exception handling, this code was + moved to the new :meth:`full_dispatch_request`. 
+ """ + req = _request_ctx_stack.top.request + if req.routing_exception is not None: + self.raise_routing_exception(req) + rule = req.url_rule + # if we provide automatic options for this URL and the + # request came with the OPTIONS method, reply automatically + if getattr(rule, 'provide_automatic_options', False) \ + and req.method == 'OPTIONS': + return self.make_default_options_response() + # otherwise dispatch to the handler for that endpoint + return self.view_functions[rule.endpoint](**req.view_args) + + def full_dispatch_request(self): + """Dispatches the request and on top of that performs request + pre and postprocessing as well as HTTP exception catching and + error handling. + + .. versionadded:: 0.7 + """ + self.try_trigger_before_first_request_functions() + try: + request_started.send(self) + rv = self.preprocess_request() + if rv is None: + rv = self.dispatch_request() + except Exception as e: + rv = self.handle_user_exception(e) + return self.finalize_request(rv) + + def finalize_request(self, rv, from_error_handler=False): + """Given the return value from a view function this finalizes + the request by converting it into a response and invoking the + postprocessing functions. This is invoked for both normal + request dispatching as well as error handlers. + + Because this means that it might be called as a result of a + failure a special safe mode is available which can be enabled + with the `from_error_handler` flag. If enabled, failures in + response processing will be logged and otherwise ignored. + + :internal: + """ + response = self.make_response(rv) + try: + response = self.process_response(response) + request_finished.send(self, response=response) + except Exception: + if not from_error_handler: + raise + self.logger.exception('Request finalizing failed with an ' + 'error while handling an error') + return response + + def try_trigger_before_first_request_functions(self): + """Called before each request and will ensure that it triggers + the :attr:`before_first_request_funcs` and only exactly once per + application instance (which means process usually). + + :internal: + """ + if self._got_first_request: + return + with self._before_request_lock: + if self._got_first_request: + return + for func in self.before_first_request_funcs: + func() + self._got_first_request = True + + def make_default_options_response(self): + """This method is called to create the default ``OPTIONS`` response. + This can be changed through subclassing to change the default + behavior of ``OPTIONS`` responses. + + .. versionadded:: 0.7 + """ + adapter = _request_ctx_stack.top.url_adapter + if hasattr(adapter, 'allowed_methods'): + methods = adapter.allowed_methods() + else: + # fallback for Werkzeug < 0.7 + methods = [] + try: + adapter.match(method='--') + except MethodNotAllowed as e: + methods = e.valid_methods + except HTTPException as e: + pass + rv = self.response_class() + rv.allow.update(methods) + return rv + + def should_ignore_error(self, error): + """This is called to figure out if an error should be ignored + or not as far as the teardown system is concerned. If this + function returns ``True`` then the teardown handlers will not be + passed the error. + + .. versionadded:: 0.10 + """ + return False + + def make_response(self, rv): + """Convert the return value from a view function to an instance of + :attr:`response_class`. + + :param rv: the return value from the view function. The view function + must return a response. 
Returning ``None``, or the view ending + without returning, is not allowed. The following types are allowed + for ``view_rv``: + + ``str`` (``unicode`` in Python 2) + A response object is created with the string encoded to UTF-8 + as the body. + + ``bytes`` (``str`` in Python 2) + A response object is created with the bytes as the body. + + ``tuple`` + Either ``(body, status, headers)``, ``(body, status)``, or + ``(body, headers)``, where ``body`` is any of the other types + allowed here, ``status`` is a string or an integer, and + ``headers`` is a dictionary or a list of ``(key, value)`` + tuples. If ``body`` is a :attr:`response_class` instance, + ``status`` overwrites the exiting value and ``headers`` are + extended. + + :attr:`response_class` + The object is returned unchanged. + + other :class:`~werkzeug.wrappers.Response` class + The object is coerced to :attr:`response_class`. + + :func:`callable` + The function is called as a WSGI application. The result is + used to create a response object. + + .. versionchanged:: 0.9 + Previously a tuple was interpreted as the arguments for the + response object. + """ + + status = headers = None + + # unpack tuple returns + if isinstance(rv, tuple): + len_rv = len(rv) + + # a 3-tuple is unpacked directly + if len_rv == 3: + rv, status, headers = rv + # decide if a 2-tuple has status or headers + elif len_rv == 2: + if isinstance(rv[1], (Headers, dict, tuple, list)): + rv, headers = rv + else: + rv, status = rv + # other sized tuples are not allowed + else: + raise TypeError( + 'The view function did not return a valid response tuple.' + ' The tuple must have the form (body, status, headers),' + ' (body, status), or (body, headers).' + ) + + # the body must not be None + if rv is None: + raise TypeError( + 'The view function did not return a valid response. The' + ' function either returned None or ended without a return' + ' statement.' + ) + + # make sure the body is an instance of the response class + if not isinstance(rv, self.response_class): + if isinstance(rv, (text_type, bytes, bytearray)): + # let the response class set the status and headers instead of + # waiting to do it manually, so that the class can handle any + # special logic + rv = self.response_class(rv, status=status, headers=headers) + status = headers = None + else: + # evaluate a WSGI callable, or coerce a different response + # class to the correct type + try: + rv = self.response_class.force_type(rv, request.environ) + except TypeError as e: + new_error = TypeError( + '{e}\nThe view function did not return a valid' + ' response. The return type must be a string, tuple,' + ' Response instance, or WSGI callable, but it was a' + ' {rv.__class__.__name__}.'.format(e=e, rv=rv) + ) + reraise(TypeError, new_error, sys.exc_info()[2]) + + # prefer the status if it was provided + if status is not None: + if isinstance(status, (text_type, bytes, bytearray)): + rv.status = status + else: + rv.status_code = status + + # extend existing headers with provided headers + if headers: + rv.headers.extend(headers) + + return rv + + def create_url_adapter(self, request): + """Creates a URL adapter for the given request. The URL adapter + is created at a point where the request context is not yet set + up so the request is passed explicitly. + + .. versionadded:: 0.6 + + .. versionchanged:: 0.9 + This can now also be called without a request object when the + URL adapter is created for the application context. + + .. 
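Editor's note: the tuple return forms accepted by :meth:`make_response`, as listed above, seen from a view function (routes and header values are illustrative)::

    from flask import Flask

    app = Flask(__name__)

    @app.route('/created')
    def created():
        # (body, status, headers)
        return 'created', 201, {'Location': '/items/1'}

    @app.route('/plain')
    def plain():
        # (body, headers) -- the status defaults to 200
        return 'ok', {'X-Example': 'demo'}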
versionchanged:: 1.0 + :data:`SERVER_NAME` no longer implicitly enables subdomain + matching. Use :attr:`subdomain_matching` instead. + """ + if request is not None: + # If subdomain matching is disabled (the default), use the + # default subdomain in all cases. This should be the default + # in Werkzeug but it currently does not have that feature. + subdomain = ((self.url_map.default_subdomain or None) + if not self.subdomain_matching else None) + return self.url_map.bind_to_environ( + request.environ, + server_name=self.config['SERVER_NAME'], + subdomain=subdomain) + # We need at the very least the server name to be set for this + # to work. + if self.config['SERVER_NAME'] is not None: + return self.url_map.bind( + self.config['SERVER_NAME'], + script_name=self.config['APPLICATION_ROOT'], + url_scheme=self.config['PREFERRED_URL_SCHEME']) + + def inject_url_defaults(self, endpoint, values): + """Injects the URL defaults for the given endpoint directly into + the values dictionary passed. This is used internally and + automatically called on URL building. + + .. versionadded:: 0.7 + """ + funcs = self.url_default_functions.get(None, ()) + if '.' in endpoint: + bp = endpoint.rsplit('.', 1)[0] + funcs = chain(funcs, self.url_default_functions.get(bp, ())) + for func in funcs: + func(endpoint, values) + + def handle_url_build_error(self, error, endpoint, values): + """Handle :class:`~werkzeug.routing.BuildError` on :meth:`url_for`. + """ + exc_type, exc_value, tb = sys.exc_info() + for handler in self.url_build_error_handlers: + try: + rv = handler(error, endpoint, values) + if rv is not None: + return rv + except BuildError as e: + # make error available outside except block (py3) + error = e + + # At this point we want to reraise the exception. If the error is + # still the same one we can reraise it with the original traceback, + # otherwise we raise it from here. + if error is exc_value: + reraise(exc_type, exc_value, tb) + raise error + + def preprocess_request(self): + """Called before the request is dispatched. Calls + :attr:`url_value_preprocessors` registered with the app and the + current blueprint (if any). Then calls :attr:`before_request_funcs` + registered with the app and the blueprint. + + If any :meth:`before_request` handler returns a non-None value, the + value is handled as if it was the return value from the view, and + further request handling is stopped. + """ + + bp = _request_ctx_stack.top.request.blueprint + + funcs = self.url_value_preprocessors.get(None, ()) + if bp is not None and bp in self.url_value_preprocessors: + funcs = chain(funcs, self.url_value_preprocessors[bp]) + for func in funcs: + func(request.endpoint, request.view_args) + + funcs = self.before_request_funcs.get(None, ()) + if bp is not None and bp in self.before_request_funcs: + funcs = chain(funcs, self.before_request_funcs[bp]) + for func in funcs: + rv = func() + if rv is not None: + return rv + + def process_response(self, response): + """Can be overridden in order to modify the response object + before it's sent to the WSGI server. By default this will + call all the :meth:`after_request` decorated functions. + + .. versionchanged:: 0.5 + As of Flask 0.5 the functions registered for after request + execution are called in reverse order of registration. + + :param response: a :attr:`response_class` object. + :return: a new response object or the same, has to be an + instance of :attr:`response_class`. 
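# Editor's illustrative sketch (not part of the vendored Flask source): an
# after_request handler of the kind process_response() invokes; it receives
# the response object and must return one. The header name is made up.
from flask import Flask

app = Flask(__name__)

@app.after_request
def add_header(response):
    response.headers['X-Handled-By'] = 'example'
    return response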
+ """ + ctx = _request_ctx_stack.top + bp = ctx.request.blueprint + funcs = ctx._after_request_functions + if bp is not None and bp in self.after_request_funcs: + funcs = chain(funcs, reversed(self.after_request_funcs[bp])) + if None in self.after_request_funcs: + funcs = chain(funcs, reversed(self.after_request_funcs[None])) + for handler in funcs: + response = handler(response) + if not self.session_interface.is_null_session(ctx.session): + self.session_interface.save_session(self, ctx.session, response) + return response + + def do_teardown_request(self, exc=_sentinel): + """Called after the request is dispatched and the response is + returned, right before the request context is popped. + + This calls all functions decorated with + :meth:`teardown_request`, and :meth:`Blueprint.teardown_request` + if a blueprint handled the request. Finally, the + :data:`request_tearing_down` signal is sent. + + This is called by + :meth:`RequestContext.pop() `, + which may be delayed during testing to maintain access to + resources. + + :param exc: An unhandled exception raised while dispatching the + request. Detected from the current exception information if + not passed. Passed to each teardown function. + + .. versionchanged:: 0.9 + Added the ``exc`` argument. + """ + if exc is _sentinel: + exc = sys.exc_info()[1] + funcs = reversed(self.teardown_request_funcs.get(None, ())) + bp = _request_ctx_stack.top.request.blueprint + if bp is not None and bp in self.teardown_request_funcs: + funcs = chain(funcs, reversed(self.teardown_request_funcs[bp])) + for func in funcs: + func(exc) + request_tearing_down.send(self, exc=exc) + + def do_teardown_appcontext(self, exc=_sentinel): + """Called right before the application context is popped. + + When handling a request, the application context is popped + after the request context. See :meth:`do_teardown_request`. + + This calls all functions decorated with + :meth:`teardown_appcontext`. Then the + :data:`appcontext_tearing_down` signal is sent. + + This is called by + :meth:`AppContext.pop() `. + + .. versionadded:: 0.9 + """ + if exc is _sentinel: + exc = sys.exc_info()[1] + for func in reversed(self.teardown_appcontext_funcs): + func(exc) + appcontext_tearing_down.send(self, exc=exc) + + def app_context(self): + """Create an :class:`~flask.ctx.AppContext`. Use as a ``with`` + block to push the context, which will make :data:`current_app` + point at this application. + + An application context is automatically pushed by + :meth:`RequestContext.push() ` + when handling a request, and when running a CLI command. Use + this to manually create a context outside of these situations. + + :: + + with app.app_context(): + init_db() + + See :doc:`/appcontext`. + + .. versionadded:: 0.9 + """ + return AppContext(self) + + def request_context(self, environ): + """Create a :class:`~flask.ctx.RequestContext` representing a + WSGI environment. Use a ``with`` block to push the context, + which will make :data:`request` point at this request. + + See :doc:`/reqcontext`. + + Typically you should not call this from your own code. A request + context is automatically pushed by the :meth:`wsgi_app` when + handling a request. Use :meth:`test_request_context` to create + an environment and context instead of this method. + + :param environ: a WSGI environment + """ + return RequestContext(self, environ) + + def test_request_context(self, *args, **kwargs): + """Create a :class:`~flask.ctx.RequestContext` for a WSGI + environment created from the given values. 
This is mostly useful + during testing, where you may want to run a function that uses + request data without dispatching a full request. + + See :doc:`/reqcontext`. + + Use a ``with`` block to push the context, which will make + :data:`request` point at the request for the created + environment. :: + + with test_request_context(...): + generate_report() + + When using the shell, it may be easier to push and pop the + context manually to avoid indentation. :: + + ctx = app.test_request_context(...) + ctx.push() + ... + ctx.pop() + + Takes the same arguments as Werkzeug's + :class:`~werkzeug.test.EnvironBuilder`, with some defaults from + the application. See the linked Werkzeug docs for most of the + available arguments. Flask-specific behavior is listed here. + + :param path: URL path being requested. + :param base_url: Base URL where the app is being served, which + ``path`` is relative to. If not given, built from + :data:`PREFERRED_URL_SCHEME`, ``subdomain``, + :data:`SERVER_NAME`, and :data:`APPLICATION_ROOT`. + :param subdomain: Subdomain name to append to + :data:`SERVER_NAME`. + :param url_scheme: Scheme to use instead of + :data:`PREFERRED_URL_SCHEME`. + :param data: The request body, either as a string or a dict of + form keys and values. + :param json: If given, this is serialized as JSON and passed as + ``data``. Also defaults ``content_type`` to + ``application/json``. + :param args: other positional arguments passed to + :class:`~werkzeug.test.EnvironBuilder`. + :param kwargs: other keyword arguments passed to + :class:`~werkzeug.test.EnvironBuilder`. + """ + from flask.testing import make_test_environ_builder + + builder = make_test_environ_builder(self, *args, **kwargs) + + try: + return self.request_context(builder.get_environ()) + finally: + builder.close() + + def wsgi_app(self, environ, start_response): + """The actual WSGI application. This is not implemented in + :meth:`__call__` so that middlewares can be applied without + losing a reference to the app object. Instead of doing this:: + + app = MyMiddleware(app) + + It's a better idea to do this instead:: + + app.wsgi_app = MyMiddleware(app.wsgi_app) + + Then you still have the original application object around and + can continue to call methods on it. + + .. versionchanged:: 0.7 + Teardown events for the request and app contexts are called + even if an unhandled error occurs. Other events may not be + called depending on when an error occurs during dispatch. + See :ref:`callbacks-and-errors`. + + :param environ: A WSGI environment. + :param start_response: A callable accepting a status code, + a list of headers, and an optional exception context to + start the response. + """ + ctx = self.request_context(environ) + error = None + try: + try: + ctx.push() + response = self.full_dispatch_request() + except Exception as e: + error = e + response = self.handle_exception(e) + except: + error = sys.exc_info()[1] + raise + return response(environ, start_response) + finally: + if self.should_ignore_error(error): + error = None + ctx.auto_pop(error) + + def __call__(self, environ, start_response): + """The WSGI server calls the Flask application object as the + WSGI application. 
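# Editor's illustrative sketch (not part of the vendored Flask source):
# wrapping wsgi_app keeps the Flask object intact while applying WSGI
# middleware. PrefixMiddleware is a made-up example class.
from flask import Flask

class PrefixMiddleware(object):
    def __init__(self, wsgi_app, prefix='/api'):
        self.wsgi_app = wsgi_app
        self.prefix = prefix

    def __call__(self, environ, start_response):
        # strip the prefix before handing the request to Flask
        path = environ.get('PATH_INFO', '')
        if path.startswith(self.prefix):
            environ['PATH_INFO'] = path[len(self.prefix):] or '/'
        return self.wsgi_app(environ, start_response)

app = Flask(__name__)
app.wsgi_app = PrefixMiddleware(app.wsgi_app)   # app itself is still a Flask object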
This calls :meth:`wsgi_app` which can be + wrapped to applying middleware.""" + return self.wsgi_app(environ, start_response) + + def __repr__(self): + return '<%s %r>' % ( + self.__class__.__name__, + self.name, + ) diff --git a/flask/venv/lib/python3.6/site-packages/flask/blueprints.py b/flask/venv/lib/python3.6/site-packages/flask/blueprints.py new file mode 100644 index 0000000..5ce5561 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/flask/blueprints.py @@ -0,0 +1,448 @@ +# -*- coding: utf-8 -*- +""" + flask.blueprints + ~~~~~~~~~~~~~~~~ + + Blueprints are the recommended way to implement larger or more + pluggable applications in Flask 0.7 and later. + + :copyright: © 2010 by the Pallets team. + :license: BSD, see LICENSE for more details. +""" +from functools import update_wrapper +from werkzeug.urls import url_join + +from .helpers import _PackageBoundObject, _endpoint_from_view_func + + +class BlueprintSetupState(object): + """Temporary holder object for registering a blueprint with the + application. An instance of this class is created by the + :meth:`~flask.Blueprint.make_setup_state` method and later passed + to all register callback functions. + """ + + def __init__(self, blueprint, app, options, first_registration): + #: a reference to the current application + self.app = app + + #: a reference to the blueprint that created this setup state. + self.blueprint = blueprint + + #: a dictionary with all options that were passed to the + #: :meth:`~flask.Flask.register_blueprint` method. + self.options = options + + #: as blueprints can be registered multiple times with the + #: application and not everything wants to be registered + #: multiple times on it, this attribute can be used to figure + #: out if the blueprint was registered in the past already. + self.first_registration = first_registration + + subdomain = self.options.get('subdomain') + if subdomain is None: + subdomain = self.blueprint.subdomain + + #: The subdomain that the blueprint should be active for, ``None`` + #: otherwise. + self.subdomain = subdomain + + url_prefix = self.options.get('url_prefix') + if url_prefix is None: + url_prefix = self.blueprint.url_prefix + #: The prefix that should be used for all URLs defined on the + #: blueprint. + self.url_prefix = url_prefix + + #: A dictionary with URL defaults that is added to each and every + #: URL that was defined with the blueprint. + self.url_defaults = dict(self.blueprint.url_values_defaults) + self.url_defaults.update(self.options.get('url_defaults', ())) + + def add_url_rule(self, rule, endpoint=None, view_func=None, **options): + """A helper method to register a rule (and optionally a view function) + to the application. The endpoint is automatically prefixed with the + blueprint's name. + """ + if self.url_prefix is not None: + if rule: + rule = '/'.join(( + self.url_prefix.rstrip('/'), rule.lstrip('/'))) + else: + rule = self.url_prefix + options.setdefault('subdomain', self.subdomain) + if endpoint is None: + endpoint = _endpoint_from_view_func(view_func) + defaults = self.url_defaults + if 'defaults' in options: + defaults = dict(defaults, **options.pop('defaults')) + self.app.add_url_rule(rule, '%s.%s' % (self.blueprint.name, endpoint), + view_func, defaults=defaults, **options) + + +class Blueprint(_PackageBoundObject): + """Represents a blueprint. 
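# Editor's illustrative sketch (not part of the vendored Flask source):
# defining a blueprint and registering it on an application. Names and the
# URL prefix are for demonstration only.
from flask import Blueprint, Flask

admin = Blueprint('admin', __name__, url_prefix='/admin')

@admin.route('/dashboard')
def dashboard():
    return 'admin dashboard'

app = Flask(__name__)
app.register_blueprint(admin)   # endpoint becomes 'admin.dashboard'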
A blueprint is an object that records + functions that will be called with the + :class:`~flask.blueprints.BlueprintSetupState` later to register functions + or other things on the main application. See :ref:`blueprints` for more + information. + + .. versionadded:: 0.7 + """ + + warn_on_modifications = False + _got_registered_once = False + + #: Blueprint local JSON decoder class to use. + #: Set to ``None`` to use the app's :class:`~flask.app.Flask.json_encoder`. + json_encoder = None + #: Blueprint local JSON decoder class to use. + #: Set to ``None`` to use the app's :class:`~flask.app.Flask.json_decoder`. + json_decoder = None + + # TODO remove the next three attrs when Sphinx :inherited-members: works + # https://github.com/sphinx-doc/sphinx/issues/741 + + #: The name of the package or module that this app belongs to. Do not + #: change this once it is set by the constructor. + import_name = None + + #: Location of the template files to be added to the template lookup. + #: ``None`` if templates should not be added. + template_folder = None + + #: Absolute path to the package on the filesystem. Used to look up + #: resources contained in the package. + root_path = None + + def __init__(self, name, import_name, static_folder=None, + static_url_path=None, template_folder=None, + url_prefix=None, subdomain=None, url_defaults=None, + root_path=None): + _PackageBoundObject.__init__(self, import_name, template_folder, + root_path=root_path) + self.name = name + self.url_prefix = url_prefix + self.subdomain = subdomain + self.static_folder = static_folder + self.static_url_path = static_url_path + self.deferred_functions = [] + if url_defaults is None: + url_defaults = {} + self.url_values_defaults = url_defaults + + def record(self, func): + """Registers a function that is called when the blueprint is + registered on the application. This function is called with the + state as argument as returned by the :meth:`make_setup_state` + method. + """ + if self._got_registered_once and self.warn_on_modifications: + from warnings import warn + warn(Warning('The blueprint was already registered once ' + 'but is getting modified now. These changes ' + 'will not show up.')) + self.deferred_functions.append(func) + + def record_once(self, func): + """Works like :meth:`record` but wraps the function in another + function that will ensure the function is only called once. If the + blueprint is registered a second time on the application, the + function passed is not called. + """ + def wrapper(state): + if state.first_registration: + func(state) + return self.record(update_wrapper(wrapper, func)) + + def make_setup_state(self, app, options, first_registration=False): + """Creates an instance of :meth:`~flask.blueprints.BlueprintSetupState` + object that is later passed to the register callback functions. + Subclasses can override this to return a subclass of the setup state. + """ + return BlueprintSetupState(self, app, options, first_registration) + + def register(self, app, options, first_registration=False): + """Called by :meth:`Flask.register_blueprint` to register all views + and callbacks registered on the blueprint with the application. Creates + a :class:`.BlueprintSetupState` and calls each :meth:`record` callback + with it. + + :param app: The application this blueprint is being registered with. + :param options: Keyword arguments forwarded from + :meth:`~Flask.register_blueprint`. 
+ :param first_registration: Whether this is the first time this + blueprint has been registered on the application. + """ + self._got_registered_once = True + state = self.make_setup_state(app, options, first_registration) + + if self.has_static_folder: + state.add_url_rule( + self.static_url_path + '/', + view_func=self.send_static_file, endpoint='static' + ) + + for deferred in self.deferred_functions: + deferred(state) + + def route(self, rule, **options): + """Like :meth:`Flask.route` but for a blueprint. The endpoint for the + :func:`url_for` function is prefixed with the name of the blueprint. + """ + def decorator(f): + endpoint = options.pop("endpoint", f.__name__) + self.add_url_rule(rule, endpoint, f, **options) + return f + return decorator + + def add_url_rule(self, rule, endpoint=None, view_func=None, **options): + """Like :meth:`Flask.add_url_rule` but for a blueprint. The endpoint for + the :func:`url_for` function is prefixed with the name of the blueprint. + """ + if endpoint: + assert '.' not in endpoint, "Blueprint endpoints should not contain dots" + if view_func and hasattr(view_func, '__name__'): + assert '.' not in view_func.__name__, "Blueprint view function name should not contain dots" + self.record(lambda s: + s.add_url_rule(rule, endpoint, view_func, **options)) + + def endpoint(self, endpoint): + """Like :meth:`Flask.endpoint` but for a blueprint. This does not + prefix the endpoint with the blueprint name, this has to be done + explicitly by the user of this method. If the endpoint is prefixed + with a `.` it will be registered to the current blueprint, otherwise + it's an application independent endpoint. + """ + def decorator(f): + def register_endpoint(state): + state.app.view_functions[endpoint] = f + self.record_once(register_endpoint) + return f + return decorator + + def app_template_filter(self, name=None): + """Register a custom template filter, available application wide. Like + :meth:`Flask.template_filter` but for a blueprint. + + :param name: the optional name of the filter, otherwise the + function name will be used. + """ + def decorator(f): + self.add_app_template_filter(f, name=name) + return f + return decorator + + def add_app_template_filter(self, f, name=None): + """Register a custom template filter, available application wide. Like + :meth:`Flask.add_template_filter` but for a blueprint. Works exactly + like the :meth:`app_template_filter` decorator. + + :param name: the optional name of the filter, otherwise the + function name will be used. + """ + def register_template(state): + state.app.jinja_env.filters[name or f.__name__] = f + self.record_once(register_template) + + def app_template_test(self, name=None): + """Register a custom template test, available application wide. Like + :meth:`Flask.template_test` but for a blueprint. + + .. versionadded:: 0.10 + + :param name: the optional name of the test, otherwise the + function name will be used. + """ + def decorator(f): + self.add_app_template_test(f, name=name) + return f + return decorator + + def add_app_template_test(self, f, name=None): + """Register a custom template test, available application wide. Like + :meth:`Flask.add_template_test` but for a blueprint. Works exactly + like the :meth:`app_template_test` decorator. + + .. versionadded:: 0.10 + + :param name: the optional name of the test, otherwise the + function name will be used. 
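# Editor's illustrative sketch (not part of the vendored Flask source):
# registering an application-wide template filter and test from a blueprint,
# as the decorators above describe. Names are made up.
from flask import Blueprint

bp = Blueprint('fmt', __name__)

@bp.app_template_filter('shout')
def shout(s):
    return s.upper() + '!'

@bp.app_template_test('blank')
def is_blank(s):
    return not s or not s.strip()

# In a template: {{ name|shout }}   /   {% if value is blank %} ... {% endif %}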
+ """ + def register_template(state): + state.app.jinja_env.tests[name or f.__name__] = f + self.record_once(register_template) + + def app_template_global(self, name=None): + """Register a custom template global, available application wide. Like + :meth:`Flask.template_global` but for a blueprint. + + .. versionadded:: 0.10 + + :param name: the optional name of the global, otherwise the + function name will be used. + """ + def decorator(f): + self.add_app_template_global(f, name=name) + return f + return decorator + + def add_app_template_global(self, f, name=None): + """Register a custom template global, available application wide. Like + :meth:`Flask.add_template_global` but for a blueprint. Works exactly + like the :meth:`app_template_global` decorator. + + .. versionadded:: 0.10 + + :param name: the optional name of the global, otherwise the + function name will be used. + """ + def register_template(state): + state.app.jinja_env.globals[name or f.__name__] = f + self.record_once(register_template) + + def before_request(self, f): + """Like :meth:`Flask.before_request` but for a blueprint. This function + is only executed before each request that is handled by a function of + that blueprint. + """ + self.record_once(lambda s: s.app.before_request_funcs + .setdefault(self.name, []).append(f)) + return f + + def before_app_request(self, f): + """Like :meth:`Flask.before_request`. Such a function is executed + before each request, even if outside of a blueprint. + """ + self.record_once(lambda s: s.app.before_request_funcs + .setdefault(None, []).append(f)) + return f + + def before_app_first_request(self, f): + """Like :meth:`Flask.before_first_request`. Such a function is + executed before the first request to the application. + """ + self.record_once(lambda s: s.app.before_first_request_funcs.append(f)) + return f + + def after_request(self, f): + """Like :meth:`Flask.after_request` but for a blueprint. This function + is only executed after each request that is handled by a function of + that blueprint. + """ + self.record_once(lambda s: s.app.after_request_funcs + .setdefault(self.name, []).append(f)) + return f + + def after_app_request(self, f): + """Like :meth:`Flask.after_request` but for a blueprint. Such a function + is executed after each request, even if outside of the blueprint. + """ + self.record_once(lambda s: s.app.after_request_funcs + .setdefault(None, []).append(f)) + return f + + def teardown_request(self, f): + """Like :meth:`Flask.teardown_request` but for a blueprint. This + function is only executed when tearing down requests handled by a + function of that blueprint. Teardown request functions are executed + when the request context is popped, even when no actual request was + performed. + """ + self.record_once(lambda s: s.app.teardown_request_funcs + .setdefault(self.name, []).append(f)) + return f + + def teardown_app_request(self, f): + """Like :meth:`Flask.teardown_request` but for a blueprint. Such a + function is executed when tearing down each request, even if outside of + the blueprint. + """ + self.record_once(lambda s: s.app.teardown_request_funcs + .setdefault(None, []).append(f)) + return f + + def context_processor(self, f): + """Like :meth:`Flask.context_processor` but for a blueprint. This + function is only executed for requests handled by a blueprint. 
+ """ + self.record_once(lambda s: s.app.template_context_processors + .setdefault(self.name, []).append(f)) + return f + + def app_context_processor(self, f): + """Like :meth:`Flask.context_processor` but for a blueprint. Such a + function is executed each request, even if outside of the blueprint. + """ + self.record_once(lambda s: s.app.template_context_processors + .setdefault(None, []).append(f)) + return f + + def app_errorhandler(self, code): + """Like :meth:`Flask.errorhandler` but for a blueprint. This + handler is used for all requests, even if outside of the blueprint. + """ + def decorator(f): + self.record_once(lambda s: s.app.errorhandler(code)(f)) + return f + return decorator + + def url_value_preprocessor(self, f): + """Registers a function as URL value preprocessor for this + blueprint. It's called before the view functions are called and + can modify the url values provided. + """ + self.record_once(lambda s: s.app.url_value_preprocessors + .setdefault(self.name, []).append(f)) + return f + + def url_defaults(self, f): + """Callback function for URL defaults for this blueprint. It's called + with the endpoint and values and should update the values passed + in place. + """ + self.record_once(lambda s: s.app.url_default_functions + .setdefault(self.name, []).append(f)) + return f + + def app_url_value_preprocessor(self, f): + """Same as :meth:`url_value_preprocessor` but application wide. + """ + self.record_once(lambda s: s.app.url_value_preprocessors + .setdefault(None, []).append(f)) + return f + + def app_url_defaults(self, f): + """Same as :meth:`url_defaults` but application wide. + """ + self.record_once(lambda s: s.app.url_default_functions + .setdefault(None, []).append(f)) + return f + + def errorhandler(self, code_or_exception): + """Registers an error handler that becomes active for this blueprint + only. Please be aware that routing does not happen local to a + blueprint so an error handler for 404 usually is not handled by + a blueprint unless it is caused inside a view function. Another + special case is the 500 internal server error which is always looked + up from the application. + + Otherwise works as the :meth:`~flask.Flask.errorhandler` decorator + of the :class:`~flask.Flask` object. + """ + def decorator(f): + self.record_once(lambda s: s.app._register_error_handler( + self.name, code_or_exception, f)) + return f + return decorator + + def register_error_handler(self, code_or_exception, f): + """Non-decorator version of the :meth:`errorhandler` error attach + function, akin to the :meth:`~flask.Flask.register_error_handler` + application-wide function of the :class:`~flask.Flask` object but + for error handlers limited to this blueprint. + + .. versionadded:: 0.11 + """ + self.record_once(lambda s: s.app._register_error_handler( + self.name, code_or_exception, f)) diff --git a/flask/venv/lib/python3.6/site-packages/flask/cli.py b/flask/venv/lib/python3.6/site-packages/flask/cli.py new file mode 100644 index 0000000..efc1733 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/flask/cli.py @@ -0,0 +1,898 @@ +# -*- coding: utf-8 -*- +""" + flask.cli + ~~~~~~~~~ + + A simple command line application to run flask apps. + + :copyright: © 2010 by the Pallets team. + :license: BSD, see LICENSE for more details. 
+""" + +from __future__ import print_function + +import ast +import inspect +import os +import re +import ssl +import sys +import traceback +from functools import update_wrapper +from operator import attrgetter +from threading import Lock, Thread + +import click +from werkzeug.utils import import_string + +from . import __version__ +from ._compat import getargspec, iteritems, reraise, text_type +from .globals import current_app +from .helpers import get_debug_flag, get_env, get_load_dotenv + +try: + import dotenv +except ImportError: + dotenv = None + + +class NoAppException(click.UsageError): + """Raised if an application cannot be found or loaded.""" + + +def find_best_app(script_info, module): + """Given a module instance this tries to find the best possible + application in the module or raises an exception. + """ + from . import Flask + + # Search for the most common names first. + for attr_name in ('app', 'application'): + app = getattr(module, attr_name, None) + + if isinstance(app, Flask): + return app + + # Otherwise find the only object that is a Flask instance. + matches = [ + v for k, v in iteritems(module.__dict__) if isinstance(v, Flask) + ] + + if len(matches) == 1: + return matches[0] + elif len(matches) > 1: + raise NoAppException( + 'Detected multiple Flask applications in module "{module}". Use ' + '"FLASK_APP={module}:name" to specify the correct ' + 'one.'.format(module=module.__name__) + ) + + # Search for app factory functions. + for attr_name in ('create_app', 'make_app'): + app_factory = getattr(module, attr_name, None) + + if inspect.isfunction(app_factory): + try: + app = call_factory(script_info, app_factory) + + if isinstance(app, Flask): + return app + except TypeError: + if not _called_with_wrong_args(app_factory): + raise + raise NoAppException( + 'Detected factory "{factory}" in module "{module}", but ' + 'could not call it without arguments. Use ' + '"FLASK_APP=\'{module}:{factory}(args)\'" to specify ' + 'arguments.'.format( + factory=attr_name, module=module.__name__ + ) + ) + + raise NoAppException( + 'Failed to find Flask application or factory in module "{module}". ' + 'Use "FLASK_APP={module}:name to specify one.'.format( + module=module.__name__ + ) + ) + + +def call_factory(script_info, app_factory, arguments=()): + """Takes an app factory, a ``script_info` object and optionally a tuple + of arguments. Checks for the existence of a script_info argument and calls + the app_factory depending on that and the arguments provided. + """ + args_spec = getargspec(app_factory) + arg_names = args_spec.args + arg_defaults = args_spec.defaults + + if 'script_info' in arg_names: + return app_factory(*arguments, script_info=script_info) + elif arguments: + return app_factory(*arguments) + elif not arguments and len(arg_names) == 1 and arg_defaults is None: + return app_factory(script_info) + + return app_factory() + + +def _called_with_wrong_args(factory): + """Check whether calling a function raised a ``TypeError`` because + the call failed or because something in the factory raised the + error. + + :param factory: the factory function that was called + :return: true if the call failed + """ + tb = sys.exc_info()[2] + + try: + while tb is not None: + if tb.tb_frame.f_code is factory.__code__: + # in the factory, it was called successfully + return False + + tb = tb.tb_next + + # didn't reach the factory + return True + finally: + del tb + + +def find_app_by_string(script_info, module, app_name): + """Checks if the given string is a variable name or a function. 
If it is a + function, it checks for specified arguments and whether it takes a + ``script_info`` argument and calls the function with the appropriate + arguments. + """ + from flask import Flask + match = re.match(r'^ *([^ ()]+) *(?:\((.*?) *,? *\))? *$', app_name) + + if not match: + raise NoAppException( + '"{name}" is not a valid variable name or function ' + 'expression.'.format(name=app_name) + ) + + name, args = match.groups() + + try: + attr = getattr(module, name) + except AttributeError as e: + raise NoAppException(e.args[0]) + + if inspect.isfunction(attr): + if args: + try: + args = ast.literal_eval('({args},)'.format(args=args)) + except (ValueError, SyntaxError)as e: + raise NoAppException( + 'Could not parse the arguments in ' + '"{app_name}".'.format(e=e, app_name=app_name) + ) + else: + args = () + + try: + app = call_factory(script_info, attr, args) + except TypeError as e: + if not _called_with_wrong_args(attr): + raise + + raise NoAppException( + '{e}\nThe factory "{app_name}" in module "{module}" could not ' + 'be called with the specified arguments.'.format( + e=e, app_name=app_name, module=module.__name__ + ) + ) + else: + app = attr + + if isinstance(app, Flask): + return app + + raise NoAppException( + 'A valid Flask application was not obtained from ' + '"{module}:{app_name}".'.format( + module=module.__name__, app_name=app_name + ) + ) + + +def prepare_import(path): + """Given a filename this will try to calculate the python path, add it + to the search path and return the actual module name that is expected. + """ + path = os.path.realpath(path) + + if os.path.splitext(path)[1] == '.py': + path = os.path.splitext(path)[0] + + if os.path.basename(path) == '__init__': + path = os.path.dirname(path) + + module_name = [] + + # move up until outside package structure (no __init__.py) + while True: + path, name = os.path.split(path) + module_name.append(name) + + if not os.path.exists(os.path.join(path, '__init__.py')): + break + + if sys.path[0] != path: + sys.path.insert(0, path) + + return '.'.join(module_name[::-1]) + + +def locate_app(script_info, module_name, app_name, raise_if_not_found=True): + __traceback_hide__ = True + + try: + __import__(module_name) + except ImportError: + # Reraise the ImportError if it occurred within the imported module. + # Determine this by checking whether the trace has a depth > 1. + if sys.exc_info()[-1].tb_next: + raise NoAppException( + 'While importing "{name}", an ImportError was raised:' + '\n\n{tb}'.format(name=module_name, tb=traceback.format_exc()) + ) + elif raise_if_not_found: + raise NoAppException( + 'Could not import "{name}".'.format(name=module_name) + ) + else: + return + + module = sys.modules[module_name] + + if app_name is None: + return find_best_app(script_info, module) + else: + return find_app_by_string(script_info, module, app_name) + + +def get_version(ctx, param, value): + if not value or ctx.resilient_parsing: + return + message = 'Flask %(version)s\nPython %(python_version)s' + click.echo(message % { + 'version': __version__, + 'python_version': sys.version, + }, color=ctx.color) + ctx.exit() + + +version_option = click.Option( + ['--version'], + help='Show the flask version', + expose_value=False, + callback=get_version, + is_flag=True, + is_eager=True +) + + +class DispatchingApp(object): + """Special application that dispatches to a Flask application which + is imported by name in a background thread. 
If an error happens + it is recorded and shown as part of the WSGI handling which in case + of the Werkzeug debugger means that it shows up in the browser. + """ + + def __init__(self, loader, use_eager_loading=False): + self.loader = loader + self._app = None + self._lock = Lock() + self._bg_loading_exc_info = None + if use_eager_loading: + self._load_unlocked() + else: + self._load_in_background() + + def _load_in_background(self): + def _load_app(): + __traceback_hide__ = True + with self._lock: + try: + self._load_unlocked() + except Exception: + self._bg_loading_exc_info = sys.exc_info() + t = Thread(target=_load_app, args=()) + t.start() + + def _flush_bg_loading_exception(self): + __traceback_hide__ = True + exc_info = self._bg_loading_exc_info + if exc_info is not None: + self._bg_loading_exc_info = None + reraise(*exc_info) + + def _load_unlocked(self): + __traceback_hide__ = True + self._app = rv = self.loader() + self._bg_loading_exc_info = None + return rv + + def __call__(self, environ, start_response): + __traceback_hide__ = True + if self._app is not None: + return self._app(environ, start_response) + self._flush_bg_loading_exception() + with self._lock: + if self._app is not None: + rv = self._app + else: + rv = self._load_unlocked() + return rv(environ, start_response) + + +class ScriptInfo(object): + """Help object to deal with Flask applications. This is usually not + necessary to interface with as it's used internally in the dispatching + to click. In future versions of Flask this object will most likely play + a bigger role. Typically it's created automatically by the + :class:`FlaskGroup` but you can also manually create it and pass it + onwards as click object. + """ + + def __init__(self, app_import_path=None, create_app=None): + #: Optionally the import path for the Flask application. + self.app_import_path = app_import_path or os.environ.get('FLASK_APP') + #: Optionally a function that is passed the script info to create + #: the instance of the application. + self.create_app = create_app + #: A dictionary with arbitrary data that can be associated with + #: this script info. + self.data = {} + self._loaded_app = None + + def load_app(self): + """Loads the Flask app (if not yet loaded) and returns it. Calling + this multiple times will just result in the already loaded app to + be returned. + """ + __traceback_hide__ = True + + if self._loaded_app is not None: + return self._loaded_app + + app = None + + if self.create_app is not None: + app = call_factory(self, self.create_app) + else: + if self.app_import_path: + path, name = (self.app_import_path.split(':', 1) + [None])[:2] + import_name = prepare_import(path) + app = locate_app(self, import_name, name) + else: + for path in ('wsgi.py', 'app.py'): + import_name = prepare_import(path) + app = locate_app(self, import_name, None, + raise_if_not_found=False) + + if app: + break + + if not app: + raise NoAppException( + 'Could not locate a Flask application. You did not provide ' + 'the "FLASK_APP" environment variable, and a "wsgi.py" or ' + '"app.py" module was not found in the current directory.' + ) + + debug = get_debug_flag() + + # Update the app's debug flag through the descriptor so that other + # values repopulate as well. + if debug is not None: + app.debug = debug + + self._loaded_app = app + return app + + +pass_script_info = click.make_pass_decorator(ScriptInfo, ensure=True) + + +def with_appcontext(f): + """Wraps a callback so that it's guaranteed to be executed with the + script's application context. 
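# Editor's illustrative sketch (not part of the vendored Flask source): a
# custom command registered on app.cli; AppGroup wraps it in with_appcontext
# automatically, so current_app is usable inside the callback.
import click
from flask import Flask, current_app

app = Flask(__name__)

@app.cli.command('show-name')
def show_name():
    """Print the import name of the current app."""
    click.echo(current_app.import_name)

# Invoked as:  FLASK_APP=example.py flask show-name   (module name is made up)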
If callbacks are registered directly + to the ``app.cli`` object then they are wrapped with this function + by default unless it's disabled. + """ + @click.pass_context + def decorator(__ctx, *args, **kwargs): + with __ctx.ensure_object(ScriptInfo).load_app().app_context(): + return __ctx.invoke(f, *args, **kwargs) + return update_wrapper(decorator, f) + + +class AppGroup(click.Group): + """This works similar to a regular click :class:`~click.Group` but it + changes the behavior of the :meth:`command` decorator so that it + automatically wraps the functions in :func:`with_appcontext`. + + Not to be confused with :class:`FlaskGroup`. + """ + + def command(self, *args, **kwargs): + """This works exactly like the method of the same name on a regular + :class:`click.Group` but it wraps callbacks in :func:`with_appcontext` + unless it's disabled by passing ``with_appcontext=False``. + """ + wrap_for_ctx = kwargs.pop('with_appcontext', True) + def decorator(f): + if wrap_for_ctx: + f = with_appcontext(f) + return click.Group.command(self, *args, **kwargs)(f) + return decorator + + def group(self, *args, **kwargs): + """This works exactly like the method of the same name on a regular + :class:`click.Group` but it defaults the group class to + :class:`AppGroup`. + """ + kwargs.setdefault('cls', AppGroup) + return click.Group.group(self, *args, **kwargs) + + +class FlaskGroup(AppGroup): + """Special subclass of the :class:`AppGroup` group that supports + loading more commands from the configured Flask app. Normally a + developer does not have to interface with this class but there are + some very advanced use cases for which it makes sense to create an + instance of this. + + For information as of why this is useful see :ref:`custom-scripts`. + + :param add_default_commands: if this is True then the default run and + shell commands wil be added. + :param add_version_option: adds the ``--version`` option. + :param create_app: an optional callback that is passed the script info and + returns the loaded app. + :param load_dotenv: Load the nearest :file:`.env` and :file:`.flaskenv` + files to set environment variables. Will also change the working + directory to the directory containing the first file found. + + .. versionchanged:: 1.0 + If installed, python-dotenv will be used to load environment variables + from :file:`.env` and :file:`.flaskenv` files. + """ + + def __init__(self, add_default_commands=True, create_app=None, + add_version_option=True, load_dotenv=True, **extra): + params = list(extra.pop('params', None) or ()) + + if add_version_option: + params.append(version_option) + + AppGroup.__init__(self, params=params, **extra) + self.create_app = create_app + self.load_dotenv = load_dotenv + + if add_default_commands: + self.add_command(run_command) + self.add_command(shell_command) + self.add_command(routes_command) + + self._loaded_plugin_commands = False + + def _load_plugin_commands(self): + if self._loaded_plugin_commands: + return + try: + import pkg_resources + except ImportError: + self._loaded_plugin_commands = True + return + + for ep in pkg_resources.iter_entry_points('flask.commands'): + self.add_command(ep.load(), ep.name) + self._loaded_plugin_commands = True + + def get_command(self, ctx, name): + self._load_plugin_commands() + + # We load built-in commands first as these should always be the + # same no matter what the app does. If the app does want to + # override this it needs to make a custom instance of this group + # and not attach the default commands. 
+ # + # This also means that the script stays functional in case the + # application completely fails. + rv = AppGroup.get_command(self, ctx, name) + if rv is not None: + return rv + + info = ctx.ensure_object(ScriptInfo) + try: + rv = info.load_app().cli.get_command(ctx, name) + if rv is not None: + return rv + except NoAppException: + pass + + def list_commands(self, ctx): + self._load_plugin_commands() + + # The commands available is the list of both the application (if + # available) plus the builtin commands. + rv = set(click.Group.list_commands(self, ctx)) + info = ctx.ensure_object(ScriptInfo) + try: + rv.update(info.load_app().cli.list_commands(ctx)) + except Exception: + # Here we intentionally swallow all exceptions as we don't + # want the help page to break if the app does not exist. + # If someone attempts to use the command we try to create + # the app again and this will give us the error. + # However, we will not do so silently because that would confuse + # users. + traceback.print_exc() + return sorted(rv) + + def main(self, *args, **kwargs): + # Set a global flag that indicates that we were invoked from the + # command line interface. This is detected by Flask.run to make the + # call into a no-op. This is necessary to avoid ugly errors when the + # script that is loaded here also attempts to start a server. + os.environ['FLASK_RUN_FROM_CLI'] = 'true' + + if get_load_dotenv(self.load_dotenv): + load_dotenv() + + obj = kwargs.get('obj') + + if obj is None: + obj = ScriptInfo(create_app=self.create_app) + + kwargs['obj'] = obj + kwargs.setdefault('auto_envvar_prefix', 'FLASK') + return super(FlaskGroup, self).main(*args, **kwargs) + + +def _path_is_ancestor(path, other): + """Take ``other`` and remove the length of ``path`` from it. Then join it + to ``path``. If it is the original value, ``path`` is an ancestor of + ``other``.""" + return os.path.join(path, other[len(path):].lstrip(os.sep)) == other + + +def load_dotenv(path=None): + """Load "dotenv" files in order of precedence to set environment variables. + + If an env var is already set it is not overwritten, so earlier files in the + list are preferred over later files. + + Changes the current working directory to the location of the first file + found, with the assumption that it is in the top level project directory + and will be where the Python path should import local packages from. + + This is a no-op if `python-dotenv`_ is not installed. + + .. _python-dotenv: https://github.com/theskumar/python-dotenv#readme + + :param path: Load the file at this location instead of searching. + :return: ``True`` if a file was loaded. + + .. versionadded:: 1.0 + """ + if dotenv is None: + if path or os.path.exists('.env') or os.path.exists('.flaskenv'): + click.secho( + ' * Tip: There are .env files present.' + ' Do "pip install python-dotenv" to use them.', + fg='yellow') + return + + if path is not None: + return dotenv.load_dotenv(path) + + new_dir = None + + for name in ('.env', '.flaskenv'): + path = dotenv.find_dotenv(name, usecwd=True) + + if not path: + continue + + if new_dir is None: + new_dir = os.path.dirname(path) + + dotenv.load_dotenv(path) + + if new_dir and os.getcwd() != new_dir: + os.chdir(new_dir) + + return new_dir is not None # at least one file was located and loaded + + +def show_server_banner(env, debug, app_import_path, eager_loading): + """Show extra startup messages the first time the server is run, + ignoring the reloader. 
+ """ + if os.environ.get('WERKZEUG_RUN_MAIN') == 'true': + return + + if app_import_path is not None: + message = ' * Serving Flask app "{0}"'.format(app_import_path) + + if not eager_loading: + message += ' (lazy loading)' + + click.echo(message) + + click.echo(' * Environment: {0}'.format(env)) + + if env == 'production': + click.secho( + ' WARNING: Do not use the development server in a production' + ' environment.', fg='red') + click.secho(' Use a production WSGI server instead.', dim=True) + + if debug is not None: + click.echo(' * Debug mode: {0}'.format('on' if debug else 'off')) + + +class CertParamType(click.ParamType): + """Click option type for the ``--cert`` option. Allows either an + existing file, the string ``'adhoc'``, or an import for a + :class:`~ssl.SSLContext` object. + """ + + name = 'path' + + def __init__(self): + self.path_type = click.Path( + exists=True, dir_okay=False, resolve_path=True) + + def convert(self, value, param, ctx): + try: + return self.path_type(value, param, ctx) + except click.BadParameter: + value = click.STRING(value, param, ctx).lower() + + if value == 'adhoc': + try: + import OpenSSL + except ImportError: + raise click.BadParameter( + 'Using ad-hoc certificates requires pyOpenSSL.', + ctx, param) + + return value + + obj = import_string(value, silent=True) + + if sys.version_info < (2, 7): + if obj: + return obj + else: + if isinstance(obj, ssl.SSLContext): + return obj + + raise + + +def _validate_key(ctx, param, value): + """The ``--key`` option must be specified when ``--cert`` is a file. + Modifies the ``cert`` param to be a ``(cert, key)`` pair if needed. + """ + cert = ctx.params.get('cert') + is_adhoc = cert == 'adhoc' + + if sys.version_info < (2, 7): + is_context = cert and not isinstance(cert, (text_type, bytes)) + else: + is_context = isinstance(cert, ssl.SSLContext) + + if value is not None: + if is_adhoc: + raise click.BadParameter( + 'When "--cert" is "adhoc", "--key" is not used.', + ctx, param) + + if is_context: + raise click.BadParameter( + 'When "--cert" is an SSLContext object, "--key is not used.', + ctx, param) + + if not cert: + raise click.BadParameter( + '"--cert" must also be specified.', + ctx, param) + + ctx.params['cert'] = cert, value + + else: + if cert and not (is_adhoc or is_context): + raise click.BadParameter( + 'Required when using "--cert".', + ctx, param) + + return value + + +@click.command('run', short_help='Runs a development server.') +@click.option('--host', '-h', default='127.0.0.1', + help='The interface to bind to.') +@click.option('--port', '-p', default=5000, + help='The port to bind to.') +@click.option('--cert', type=CertParamType(), + help='Specify a certificate file to use HTTPS.') +@click.option('--key', + type=click.Path(exists=True, dir_okay=False, resolve_path=True), + callback=_validate_key, expose_value=False, + help='The key file to use when specifying a certificate.') +@click.option('--reload/--no-reload', default=None, + help='Enable or disable the reloader. By default the reloader ' + 'is active if debug is enabled.') +@click.option('--debugger/--no-debugger', default=None, + help='Enable or disable the debugger. By default the debugger ' + 'is active if debug is enabled.') +@click.option('--eager-loading/--lazy-loader', default=None, + help='Enable or disable eager loading. 
By default eager ' + 'loading is enabled if the reloader is disabled.') +@click.option('--with-threads/--without-threads', default=True, + help='Enable or disable multithreading.') +@pass_script_info +def run_command(info, host, port, reload, debugger, eager_loading, + with_threads, cert): + """Run a local development server. + + This server is for development purposes only. It does not provide + the stability, security, or performance of production WSGI servers. + + The reloader and debugger are enabled by default if + FLASK_ENV=development or FLASK_DEBUG=1. + """ + debug = get_debug_flag() + + if reload is None: + reload = debug + + if debugger is None: + debugger = debug + + if eager_loading is None: + eager_loading = not reload + + show_server_banner(get_env(), debug, info.app_import_path, eager_loading) + app = DispatchingApp(info.load_app, use_eager_loading=eager_loading) + + from werkzeug.serving import run_simple + run_simple(host, port, app, use_reloader=reload, use_debugger=debugger, + threaded=with_threads, ssl_context=cert) + + +@click.command('shell', short_help='Runs a shell in the app context.') +@with_appcontext +def shell_command(): + """Runs an interactive Python shell in the context of a given + Flask application. The application will populate the default + namespace of this shell according to it's configuration. + + This is useful for executing small snippets of management code + without having to manually configure the application. + """ + import code + from flask.globals import _app_ctx_stack + app = _app_ctx_stack.top.app + banner = 'Python %s on %s\nApp: %s [%s]\nInstance: %s' % ( + sys.version, + sys.platform, + app.import_name, + app.env, + app.instance_path, + ) + ctx = {} + + # Support the regular Python interpreter startup script if someone + # is using it. + startup = os.environ.get('PYTHONSTARTUP') + if startup and os.path.isfile(startup): + with open(startup, 'r') as f: + eval(compile(f.read(), startup, 'exec'), ctx) + + ctx.update(app.make_shell_context()) + + code.interact(banner=banner, local=ctx) + + +@click.command('routes', short_help='Show the routes for the app.') +@click.option( + '--sort', '-s', + type=click.Choice(('endpoint', 'methods', 'rule', 'match')), + default='endpoint', + help=( + 'Method to sort routes by. "match" is the order that Flask will match ' + 'routes when dispatching a request.' + ) +) +@click.option( + '--all-methods', + is_flag=True, + help="Show HEAD and OPTIONS methods." 
+) +@with_appcontext +def routes_command(sort, all_methods): + """Show all registered routes with endpoints and methods.""" + + rules = list(current_app.url_map.iter_rules()) + if not rules: + click.echo('No routes were registered.') + return + + ignored_methods = set(() if all_methods else ('HEAD', 'OPTIONS')) + + if sort in ('endpoint', 'rule'): + rules = sorted(rules, key=attrgetter(sort)) + elif sort == 'methods': + rules = sorted(rules, key=lambda rule: sorted(rule.methods)) + + rule_methods = [ + ', '.join(sorted(rule.methods - ignored_methods)) for rule in rules + ] + + headers = ('Endpoint', 'Methods', 'Rule') + widths = ( + max(len(rule.endpoint) for rule in rules), + max(len(methods) for methods in rule_methods), + max(len(rule.rule) for rule in rules), + ) + widths = [max(len(h), w) for h, w in zip(headers, widths)] + row = '{{0:<{0}}} {{1:<{1}}} {{2:<{2}}}'.format(*widths) + + click.echo(row.format(*headers).strip()) + click.echo(row.format(*('-' * width for width in widths))) + + for rule, methods in zip(rules, rule_methods): + click.echo(row.format(rule.endpoint, methods, rule.rule).rstrip()) + + +cli = FlaskGroup(help="""\ +A general utility script for Flask applications. + +Provides commands from Flask, extensions, and the application. Loads the +application defined in the FLASK_APP environment variable, or from a wsgi.py +file. Setting the FLASK_ENV environment variable to 'development' will enable +debug mode. + +\b + {prefix}{cmd} FLASK_APP=hello.py + {prefix}{cmd} FLASK_ENV=development + {prefix}flask run +""".format( + cmd='export' if os.name == 'posix' else 'set', + prefix='$ ' if os.name == 'posix' else '> ' +)) + + +def main(as_module=False): + args = sys.argv[1:] + + if as_module: + this_module = 'flask' + + if sys.version_info < (2, 7): + this_module += '.cli' + + name = 'python -m ' + this_module + + # Python rewrites "python -m flask" to the path to the file in argv. + # Restore the original command so that the reloader works. + sys.argv = ['-m', this_module] + args + else: + name = None + + cli.main(args=args, prog_name=name) + + +if __name__ == '__main__': + main(as_module=True) diff --git a/flask/venv/lib/python3.6/site-packages/flask/config.py b/flask/venv/lib/python3.6/site-packages/flask/config.py new file mode 100644 index 0000000..d6074ba --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/flask/config.py @@ -0,0 +1,265 @@ +# -*- coding: utf-8 -*- +""" + flask.config + ~~~~~~~~~~~~ + + Implements the configuration related objects. + + :copyright: © 2010 by the Pallets team. + :license: BSD, see LICENSE for more details. +""" + +import os +import types +import errno + +from werkzeug.utils import import_string +from ._compat import string_types, iteritems +from . import json + + +class ConfigAttribute(object): + """Makes an attribute forward to the config""" + + def __init__(self, name, get_converter=None): + self.__name__ = name + self.get_converter = get_converter + + def __get__(self, obj, type=None): + if obj is None: + return self + rv = obj.config[self.__name__] + if self.get_converter is not None: + rv = self.get_converter(rv) + return rv + + def __set__(self, obj, value): + obj.config[self.__name__] = value + + +class Config(dict): + """Works exactly like a dict but provides ways to fill it from files + or special dictionaries. There are two common patterns to populate the + config. 
+ + Either you can fill the config from a config file:: + + app.config.from_pyfile('yourconfig.cfg') + + Or alternatively you can define the configuration options in the + module that calls :meth:`from_object` or provide an import path to + a module that should be loaded. It is also possible to tell it to + use the same module and with that provide the configuration values + just before the call:: + + DEBUG = True + SECRET_KEY = 'development key' + app.config.from_object(__name__) + + In both cases (loading from any Python file or loading from modules), + only uppercase keys are added to the config. This makes it possible to use + lowercase values in the config file for temporary values that are not added + to the config or to define the config keys in the same file that implements + the application. + + Probably the most interesting way to load configurations is from an + environment variable pointing to a file:: + + app.config.from_envvar('YOURAPPLICATION_SETTINGS') + + In this case before launching the application you have to set this + environment variable to the file you want to use. On Linux and OS X + use the export statement:: + + export YOURAPPLICATION_SETTINGS='/path/to/config/file' + + On windows use `set` instead. + + :param root_path: path to which files are read relative from. When the + config object is created by the application, this is + the application's :attr:`~flask.Flask.root_path`. + :param defaults: an optional dictionary of default values + """ + + def __init__(self, root_path, defaults=None): + dict.__init__(self, defaults or {}) + self.root_path = root_path + + def from_envvar(self, variable_name, silent=False): + """Loads a configuration from an environment variable pointing to + a configuration file. This is basically just a shortcut with nicer + error messages for this line of code:: + + app.config.from_pyfile(os.environ['YOURAPPLICATION_SETTINGS']) + + :param variable_name: name of the environment variable + :param silent: set to ``True`` if you want silent failure for missing + files. + :return: bool. ``True`` if able to load config, ``False`` otherwise. + """ + rv = os.environ.get(variable_name) + if not rv: + if silent: + return False + raise RuntimeError('The environment variable %r is not set ' + 'and as such configuration could not be ' + 'loaded. Set this variable and make it ' + 'point to a configuration file' % + variable_name) + return self.from_pyfile(rv, silent=silent) + + def from_pyfile(self, filename, silent=False): + """Updates the values in the config from a Python file. This function + behaves as if the file was imported as module with the + :meth:`from_object` function. + + :param filename: the filename of the config. This can either be an + absolute filename or a filename relative to the + root path. + :param silent: set to ``True`` if you want silent failure for missing + files. + + .. versionadded:: 0.7 + `silent` parameter. + """ + filename = os.path.join(self.root_path, filename) + d = types.ModuleType('config') + d.__file__ = filename + try: + with open(filename, mode='rb') as config_file: + exec(compile(config_file.read(), filename, 'exec'), d.__dict__) + except IOError as e: + if silent and e.errno in ( + errno.ENOENT, errno.EISDIR, errno.ENOTDIR + ): + return False + e.strerror = 'Unable to load configuration file (%s)' % e.strerror + raise + self.from_object(d) + return True + + def from_object(self, obj): + """Updates the values from the given object. 
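# Editor's illustrative sketch (not part of the vendored Flask source):
# class-based configuration of the kind from_object() loads; only uppercase
# attributes are copied into the config. Class and key names are made up.
from flask import Flask

class BaseConfig(object):
    DEBUG = False
    SECRET_KEY = 'change-me'

class DevelopmentConfig(BaseConfig):
    DEBUG = True

app = Flask(__name__)
app.config.from_object(DevelopmentConfig)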
An object can be of one + of the following two types: + + - a string: in this case the object with that name will be imported + - an actual object reference: that object is used directly + + Objects are usually either modules or classes. :meth:`from_object` + loads only the uppercase attributes of the module/class. A ``dict`` + object will not work with :meth:`from_object` because the keys of a + ``dict`` are not attributes of the ``dict`` class. + + Example of module-based configuration:: + + app.config.from_object('yourapplication.default_config') + from yourapplication import default_config + app.config.from_object(default_config) + + You should not use this function to load the actual configuration but + rather configuration defaults. The actual config should be loaded + with :meth:`from_pyfile` and ideally from a location not within the + package because the package might be installed system wide. + + See :ref:`config-dev-prod` for an example of class-based configuration + using :meth:`from_object`. + + :param obj: an import name or object + """ + if isinstance(obj, string_types): + obj = import_string(obj) + for key in dir(obj): + if key.isupper(): + self[key] = getattr(obj, key) + + def from_json(self, filename, silent=False): + """Updates the values in the config from a JSON file. This function + behaves as if the JSON object was a dictionary and passed to the + :meth:`from_mapping` function. + + :param filename: the filename of the JSON file. This can either be an + absolute filename or a filename relative to the + root path. + :param silent: set to ``True`` if you want silent failure for missing + files. + + .. versionadded:: 0.11 + """ + filename = os.path.join(self.root_path, filename) + + try: + with open(filename) as json_file: + obj = json.loads(json_file.read()) + except IOError as e: + if silent and e.errno in (errno.ENOENT, errno.EISDIR): + return False + e.strerror = 'Unable to load configuration file (%s)' % e.strerror + raise + return self.from_mapping(obj) + + def from_mapping(self, *mapping, **kwargs): + """Updates the config like :meth:`update` ignoring items with non-upper + keys. + + .. versionadded:: 0.11 + """ + mappings = [] + if len(mapping) == 1: + if hasattr(mapping[0], 'items'): + mappings.append(mapping[0].items()) + else: + mappings.append(mapping[0]) + elif len(mapping) > 1: + raise TypeError( + 'expected at most 1 positional argument, got %d' % len(mapping) + ) + mappings.append(kwargs.items()) + for mapping in mappings: + for (key, value) in mapping: + if key.isupper(): + self[key] = value + return True + + def get_namespace(self, namespace, lowercase=True, trim_namespace=True): + """Returns a dictionary containing a subset of configuration options + that match the specified namespace/prefix. Example usage:: + + app.config['IMAGE_STORE_TYPE'] = 'fs' + app.config['IMAGE_STORE_PATH'] = '/var/app/images' + app.config['IMAGE_STORE_BASE_URL'] = 'http://img.website.com' + image_store_config = app.config.get_namespace('IMAGE_STORE_') + + The resulting dictionary `image_store_config` would look like:: + + { + 'type': 'fs', + 'path': '/var/app/images', + 'base_url': 'http://img.website.com' + } + + This is often useful when configuration options map directly to + keyword arguments in functions or class constructors. 
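A short sketch of `from_object` with class-based defaults and of `from_mapping`'s uppercase-only filtering; the `DefaultConfig` class and its values are illustrative::

    from flask import Flask

    class DefaultConfig(object):
        DEBUG = False
        SECRET_KEY = 'change-me'   # illustrative placeholder

    app = Flask(__name__)
    app.config.from_object(DefaultConfig)   # only uppercase attributes are copied
    app.config.from_mapping(TESTING=True, ignored='lowercase keys are skipped')
    assert 'TESTING' in app.config and 'ignored' not in app.config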
+ + :param namespace: a configuration namespace + :param lowercase: a flag indicating if the keys of the resulting + dictionary should be lowercase + :param trim_namespace: a flag indicating if the keys of the resulting + dictionary should not include the namespace + + .. versionadded:: 0.11 + """ + rv = {} + for k, v in iteritems(self): + if not k.startswith(namespace): + continue + if trim_namespace: + key = k[len(namespace):] + else: + key = k + if lowercase: + key = key.lower() + rv[key] = v + return rv + + def __repr__(self): + return '<%s %s>' % (self.__class__.__name__, dict.__repr__(self)) diff --git a/flask/venv/lib/python3.6/site-packages/flask/ctx.py b/flask/venv/lib/python3.6/site-packages/flask/ctx.py new file mode 100644 index 0000000..8472c92 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/flask/ctx.py @@ -0,0 +1,457 @@ +# -*- coding: utf-8 -*- +""" + flask.ctx + ~~~~~~~~~ + + Implements the objects required to keep the context. + + :copyright: © 2010 by the Pallets team. + :license: BSD, see LICENSE for more details. +""" + +import sys +from functools import update_wrapper + +from werkzeug.exceptions import HTTPException + +from .globals import _request_ctx_stack, _app_ctx_stack +from .signals import appcontext_pushed, appcontext_popped +from ._compat import BROKEN_PYPY_CTXMGR_EXIT, reraise + + +# a singleton sentinel value for parameter defaults +_sentinel = object() + + +class _AppCtxGlobals(object): + """A plain object. Used as a namespace for storing data during an + application context. + + Creating an app context automatically creates this object, which is + made available as the :data:`g` proxy. + + .. describe:: 'key' in g + + Check whether an attribute is present. + + .. versionadded:: 0.10 + + .. describe:: iter(g) + + Return an iterator over the attribute names. + + .. versionadded:: 0.10 + """ + + def get(self, name, default=None): + """Get an attribute by name, or a default value. Like + :meth:`dict.get`. + + :param name: Name of attribute to get. + :param default: Value to return if the attribute is not present. + + .. versionadded:: 0.10 + """ + return self.__dict__.get(name, default) + + def pop(self, name, default=_sentinel): + """Get and remove an attribute by name. Like :meth:`dict.pop`. + + :param name: Name of attribute to pop. + :param default: Value to return if the attribute is not present, + instead of raise a ``KeyError``. + + .. versionadded:: 0.11 + """ + if default is _sentinel: + return self.__dict__.pop(name) + else: + return self.__dict__.pop(name, default) + + def setdefault(self, name, default=None): + """Get the value of an attribute if it is present, otherwise + set and return a default value. Like :meth:`dict.setdefault`. + + :param name: Name of attribute to get. + :param: default: Value to set and return if the attribute is not + present. + + .. versionadded:: 0.11 + """ + return self.__dict__.setdefault(name, default) + + def __contains__(self, item): + return item in self.__dict__ + + def __iter__(self): + return iter(self.__dict__) + + def __repr__(self): + top = _app_ctx_stack.top + if top is not None: + return '' % top.app.name + return object.__repr__(self) + + +def after_this_request(f): + """Executes a function after this request. This is useful to modify + response objects. The function is passed the response object and has + to return the same or a new one. 
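The `_AppCtxGlobals` namespace above is what the `g` proxy resolves to; a minimal sketch of its dict-like helpers inside an application context::

    from flask import Flask, g

    app = Flask(__name__)

    with app.app_context():
        g.db_handle = 'fake-connection'        # illustrative value
        print(g.get('db_handle'))              # 'fake-connection'
        print(g.get('missing', 'fallback'))    # 'fallback'
        print(g.pop('db_handle'))              # removes and returns the value
        print('db_handle' in g)                # False
        print(g.setdefault('counter', 0))      # 0, and stores it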
+ + Example:: + + @app.route('/') + def index(): + @after_this_request + def add_header(response): + response.headers['X-Foo'] = 'Parachute' + return response + return 'Hello World!' + + This is more useful if a function other than the view function wants to + modify a response. For instance think of a decorator that wants to add + some headers without converting the return value into a response object. + + .. versionadded:: 0.9 + """ + _request_ctx_stack.top._after_request_functions.append(f) + return f + + +def copy_current_request_context(f): + """A helper function that decorates a function to retain the current + request context. This is useful when working with greenlets. The moment + the function is decorated a copy of the request context is created and + then pushed when the function is called. + + Example:: + + import gevent + from flask import copy_current_request_context + + @app.route('/') + def index(): + @copy_current_request_context + def do_some_work(): + # do some work here, it can access flask.request like you + # would otherwise in the view function. + ... + gevent.spawn(do_some_work) + return 'Regular response' + + .. versionadded:: 0.10 + """ + top = _request_ctx_stack.top + if top is None: + raise RuntimeError('This decorator can only be used at local scopes ' + 'when a request context is on the stack. For instance within ' + 'view functions.') + reqctx = top.copy() + def wrapper(*args, **kwargs): + with reqctx: + return f(*args, **kwargs) + return update_wrapper(wrapper, f) + + +def has_request_context(): + """If you have code that wants to test if a request context is there or + not this function can be used. For instance, you may want to take advantage + of request information if the request object is available, but fail + silently if it is unavailable. + + :: + + class User(db.Model): + + def __init__(self, username, remote_addr=None): + self.username = username + if remote_addr is None and has_request_context(): + remote_addr = request.remote_addr + self.remote_addr = remote_addr + + Alternatively you can also just test any of the context bound objects + (such as :class:`request` or :class:`g` for truthness):: + + class User(db.Model): + + def __init__(self, username, remote_addr=None): + self.username = username + if remote_addr is None and request: + remote_addr = request.remote_addr + self.remote_addr = remote_addr + + .. versionadded:: 0.7 + """ + return _request_ctx_stack.top is not None + + +def has_app_context(): + """Works like :func:`has_request_context` but for the application + context. You can also just do a boolean check on the + :data:`current_app` object instead. + + .. versionadded:: 0.9 + """ + return _app_ctx_stack.top is not None + + +class AppContext(object): + """The application context binds an application object implicitly + to the current thread or greenlet, similar to how the + :class:`RequestContext` binds request information. The application + context is also implicitly created if a request context is created + but the application is not on top of the individual application + context. + """ + + def __init__(self, app): + self.app = app + self.url_adapter = app.create_url_adapter(None) + self.g = app.app_ctx_globals_class() + + # Like request context, app contexts can be pushed multiple times + # but there a basic "refcount" is enough to track them. 
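A small sketch of the two probes defined above, showing which contexts are active at each point::

    from flask import Flask, has_app_context, has_request_context

    app = Flask(__name__)
    print(has_app_context(), has_request_context())       # False False

    with app.app_context():
        print(has_app_context(), has_request_context())   # True False

    with app.test_request_context('/'):
        # pushing a request context implicitly pushes an app context too
        print(has_app_context(), has_request_context())   # True True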
+ self._refcnt = 0 + + def push(self): + """Binds the app context to the current context.""" + self._refcnt += 1 + if hasattr(sys, 'exc_clear'): + sys.exc_clear() + _app_ctx_stack.push(self) + appcontext_pushed.send(self.app) + + def pop(self, exc=_sentinel): + """Pops the app context.""" + try: + self._refcnt -= 1 + if self._refcnt <= 0: + if exc is _sentinel: + exc = sys.exc_info()[1] + self.app.do_teardown_appcontext(exc) + finally: + rv = _app_ctx_stack.pop() + assert rv is self, 'Popped wrong app context. (%r instead of %r)' \ + % (rv, self) + appcontext_popped.send(self.app) + + def __enter__(self): + self.push() + return self + + def __exit__(self, exc_type, exc_value, tb): + self.pop(exc_value) + + if BROKEN_PYPY_CTXMGR_EXIT and exc_type is not None: + reraise(exc_type, exc_value, tb) + + +class RequestContext(object): + """The request context contains all request relevant information. It is + created at the beginning of the request and pushed to the + `_request_ctx_stack` and removed at the end of it. It will create the + URL adapter and request object for the WSGI environment provided. + + Do not attempt to use this class directly, instead use + :meth:`~flask.Flask.test_request_context` and + :meth:`~flask.Flask.request_context` to create this object. + + When the request context is popped, it will evaluate all the + functions registered on the application for teardown execution + (:meth:`~flask.Flask.teardown_request`). + + The request context is automatically popped at the end of the request + for you. In debug mode the request context is kept around if + exceptions happen so that interactive debuggers have a chance to + introspect the data. With 0.4 this can also be forced for requests + that did not fail and outside of ``DEBUG`` mode. By setting + ``'flask._preserve_context'`` to ``True`` on the WSGI environment the + context will not pop itself at the end of the request. This is used by + the :meth:`~flask.Flask.test_client` for example to implement the + deferred cleanup functionality. + + You might find this helpful for unittests where you need the + information from the context local around for a little longer. Make + sure to properly :meth:`~werkzeug.LocalStack.pop` the stack yourself in + that situation, otherwise your unittests will leak memory. + """ + + def __init__(self, app, environ, request=None): + self.app = app + if request is None: + request = app.request_class(environ) + self.request = request + self.url_adapter = app.create_url_adapter(self.request) + self.flashes = None + self.session = None + + # Request contexts can be pushed multiple times and interleaved with + # other request contexts. Now only if the last level is popped we + # get rid of them. Additionally if an application context is missing + # one is created implicitly so for each level we add this information + self._implicit_app_ctx_stack = [] + + # indicator if the context was preserved. Next time another context + # is pushed the preserved context is popped. + self.preserved = False + + # remembers the exception for pop if there is one in case the context + # preservation kicks in. + self._preserved_exc = None + + # Functions that should be executed after the request on the response + # object. These will be called before the regular "after_request" + # functions. 
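A sketch of the reference counting mentioned in the comment above: pushing the same app context twice only tears it down when the outermost push is popped::

    from flask import Flask, current_app

    app = Flask(__name__)
    ctx = app.app_context()

    with ctx:                       # refcount 1
        with ctx:                   # refcount 2, same context object
            print(current_app.name)
        print(current_app.name)     # still bound; teardown runs on the outer exit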
+ self._after_request_functions = [] + + self.match_request() + + def _get_g(self): + return _app_ctx_stack.top.g + def _set_g(self, value): + _app_ctx_stack.top.g = value + g = property(_get_g, _set_g) + del _get_g, _set_g + + def copy(self): + """Creates a copy of this request context with the same request object. + This can be used to move a request context to a different greenlet. + Because the actual request object is the same this cannot be used to + move a request context to a different thread unless access to the + request object is locked. + + .. versionadded:: 0.10 + """ + return self.__class__(self.app, + environ=self.request.environ, + request=self.request + ) + + def match_request(self): + """Can be overridden by a subclass to hook into the matching + of the request. + """ + try: + url_rule, self.request.view_args = \ + self.url_adapter.match(return_rule=True) + self.request.url_rule = url_rule + except HTTPException as e: + self.request.routing_exception = e + + def push(self): + """Binds the request context to the current context.""" + # If an exception occurs in debug mode or if context preservation is + # activated under exception situations exactly one context stays + # on the stack. The rationale is that you want to access that + # information under debug situations. However if someone forgets to + # pop that context again we want to make sure that on the next push + # it's invalidated, otherwise we run at risk that something leaks + # memory. This is usually only a problem in test suite since this + # functionality is not active in production environments. + top = _request_ctx_stack.top + if top is not None and top.preserved: + top.pop(top._preserved_exc) + + # Before we push the request context we have to ensure that there + # is an application context. + app_ctx = _app_ctx_stack.top + if app_ctx is None or app_ctx.app != self.app: + app_ctx = self.app.app_context() + app_ctx.push() + self._implicit_app_ctx_stack.append(app_ctx) + else: + self._implicit_app_ctx_stack.append(None) + + if hasattr(sys, 'exc_clear'): + sys.exc_clear() + + _request_ctx_stack.push(self) + + # Open the session at the moment that the request context is available. + # This allows a custom open_session method to use the request context. + # Only open a new session if this is the first time the request was + # pushed, otherwise stream_with_context loses the session. + if self.session is None: + session_interface = self.app.session_interface + self.session = session_interface.open_session( + self.app, self.request + ) + + if self.session is None: + self.session = session_interface.make_null_session(self.app) + + def pop(self, exc=_sentinel): + """Pops the request context and unbinds it by doing that. This will + also trigger the execution of functions registered by the + :meth:`~flask.Flask.teardown_request` decorator. + + .. versionchanged:: 0.9 + Added the `exc` argument. + """ + app_ctx = self._implicit_app_ctx_stack.pop() + + try: + clear_request = False + if not self._implicit_app_ctx_stack: + self.preserved = False + self._preserved_exc = None + if exc is _sentinel: + exc = sys.exc_info()[1] + self.app.do_teardown_request(exc) + + # If this interpreter supports clearing the exception information + # we do that now. This will only go into effect on Python 2.x, + # on 3.x it disappears automatically at the end of the exception + # stack. 
+ if hasattr(sys, 'exc_clear'): + sys.exc_clear() + + request_close = getattr(self.request, 'close', None) + if request_close is not None: + request_close() + clear_request = True + finally: + rv = _request_ctx_stack.pop() + + # get rid of circular dependencies at the end of the request + # so that we don't require the GC to be active. + if clear_request: + rv.request.environ['werkzeug.request'] = None + + # Get rid of the app as well if necessary. + if app_ctx is not None: + app_ctx.pop(exc) + + assert rv is self, 'Popped wrong request context. ' \ + '(%r instead of %r)' % (rv, self) + + def auto_pop(self, exc): + if self.request.environ.get('flask._preserve_context') or \ + (exc is not None and self.app.preserve_context_on_exception): + self.preserved = True + self._preserved_exc = exc + else: + self.pop(exc) + + def __enter__(self): + self.push() + return self + + def __exit__(self, exc_type, exc_value, tb): + # do not pop the request stack if we are in debug mode and an + # exception happened. This will allow the debugger to still + # access the request object in the interactive shell. Furthermore + # the context can be force kept alive for the test client. + # See flask.testing for how this works. + self.auto_pop(exc_value) + + if BROKEN_PYPY_CTXMGR_EXIT and exc_type is not None: + reraise(exc_type, exc_value, tb) + + def __repr__(self): + return '<%s \'%s\' [%s] of %s>' % ( + self.__class__.__name__, + self.request.url, + self.request.method, + self.app.name, + ) diff --git a/flask/venv/lib/python3.6/site-packages/flask/debughelpers.py b/flask/venv/lib/python3.6/site-packages/flask/debughelpers.py new file mode 100644 index 0000000..e9765f2 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/flask/debughelpers.py @@ -0,0 +1,168 @@ +# -*- coding: utf-8 -*- +""" + flask.debughelpers + ~~~~~~~~~~~~~~~~~~ + + Various helpers to make the development experience better. + + :copyright: © 2010 by the Pallets team. + :license: BSD, see LICENSE for more details. +""" + +import os +from warnings import warn + +from ._compat import implements_to_string, text_type +from .app import Flask +from .blueprints import Blueprint +from .globals import _request_ctx_stack + + +class UnexpectedUnicodeError(AssertionError, UnicodeError): + """Raised in places where we want some better error reporting for + unexpected unicode or binary data. + """ + + +@implements_to_string +class DebugFilesKeyError(KeyError, AssertionError): + """Raised from request.files during debugging. The idea is that it can + provide a better error message than just a generic KeyError/BadRequest. + """ + + def __init__(self, request, key): + form_matches = request.form.getlist(key) + buf = ['You tried to access the file "%s" in the request.files ' + 'dictionary but it does not exist. The mimetype for the request ' + 'is "%s" instead of "multipart/form-data" which means that no ' + 'file contents were transmitted. To fix this error you should ' + 'provide enctype="multipart/form-data" in your form.' % + (key, request.mimetype)] + if form_matches: + buf.append('\n\nThe browser instead transmitted some file names. ' + 'This was submitted: %s' % ', '.join('"%s"' % x + for x in form_matches)) + self.msg = ''.join(buf) + + def __str__(self): + return self.msg + + +class FormDataRoutingRedirect(AssertionError): + """This exception is raised by Flask in debug mode if it detects a + redirect caused by the routing system when the request method is not + GET, HEAD or OPTIONS. Reasoning: form data will be dropped. 
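The helper above fires when `request.files` is used but the form was not posted as multipart/form-data. A minimal sketch of a view that would trigger it in debug mode if the client omits `enctype="multipart/form-data"`; the route and field names are illustrative::

    from flask import Flask, request

    app = Flask(__name__)

    @app.route('/upload', methods=['POST'])
    def upload():
        # In debug mode, a missing multipart body turns the KeyError below
        # into the more descriptive DebugFilesKeyError defined above.
        uploaded = request.files['report']
        return uploaded.filename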
+ """ + + def __init__(self, request): + exc = request.routing_exception + buf = ['A request was sent to this URL (%s) but a redirect was ' + 'issued automatically by the routing system to "%s".' + % (request.url, exc.new_url)] + + # In case just a slash was appended we can be extra helpful + if request.base_url + '/' == exc.new_url.split('?')[0]: + buf.append(' The URL was defined with a trailing slash so ' + 'Flask will automatically redirect to the URL ' + 'with the trailing slash if it was accessed ' + 'without one.') + + buf.append(' Make sure to directly send your %s-request to this URL ' + 'since we can\'t make browsers or HTTP clients redirect ' + 'with form data reliably or without user interaction.' % + request.method) + buf.append('\n\nNote: this exception is only raised in debug mode') + AssertionError.__init__(self, ''.join(buf).encode('utf-8')) + + +def attach_enctype_error_multidict(request): + """Since Flask 0.8 we're monkeypatching the files object in case a + request is detected that does not use multipart form data but the files + object is accessed. + """ + oldcls = request.files.__class__ + class newcls(oldcls): + def __getitem__(self, key): + try: + return oldcls.__getitem__(self, key) + except KeyError: + if key not in request.form: + raise + raise DebugFilesKeyError(request, key) + newcls.__name__ = oldcls.__name__ + newcls.__module__ = oldcls.__module__ + request.files.__class__ = newcls + + +def _dump_loader_info(loader): + yield 'class: %s.%s' % (type(loader).__module__, type(loader).__name__) + for key, value in sorted(loader.__dict__.items()): + if key.startswith('_'): + continue + if isinstance(value, (tuple, list)): + if not all(isinstance(x, (str, text_type)) for x in value): + continue + yield '%s:' % key + for item in value: + yield ' - %s' % item + continue + elif not isinstance(value, (str, text_type, int, float, bool)): + continue + yield '%s: %r' % (key, value) + + +def explain_template_loading_attempts(app, template, attempts): + """This should help developers understand what failed""" + info = ['Locating template "%s":' % template] + total_found = 0 + blueprint = None + reqctx = _request_ctx_stack.top + if reqctx is not None and reqctx.request.blueprint is not None: + blueprint = reqctx.request.blueprint + + for idx, (loader, srcobj, triple) in enumerate(attempts): + if isinstance(srcobj, Flask): + src_info = 'application "%s"' % srcobj.import_name + elif isinstance(srcobj, Blueprint): + src_info = 'blueprint "%s" (%s)' % (srcobj.name, + srcobj.import_name) + else: + src_info = repr(srcobj) + + info.append('% 5d: trying loader of %s' % ( + idx + 1, src_info)) + + for line in _dump_loader_info(loader): + info.append(' %s' % line) + + if triple is None: + detail = 'no match' + else: + detail = 'found (%r)' % (triple[1] or '') + total_found += 1 + info.append(' -> %s' % detail) + + seems_fishy = False + if total_found == 0: + info.append('Error: the template could not be found.') + seems_fishy = True + elif total_found > 1: + info.append('Warning: multiple loaders returned a match for the template.') + seems_fishy = True + + if blueprint is not None and seems_fishy: + info.append(' The template was looked up from an endpoint that ' + 'belongs to the blueprint "%s".' 
% blueprint) + info.append(' Maybe you did not place a template in the right folder?') + info.append(' See http://flask.pocoo.org/docs/blueprints/#templates') + + app.logger.info('\n'.join(info)) + + +def explain_ignored_app_run(): + if os.environ.get('WERKZEUG_RUN_MAIN') != 'true': + warn(Warning('Silently ignoring app.run() because the ' + 'application is run from the flask command line ' + 'executable. Consider putting app.run() behind an ' + 'if __name__ == "__main__" guard to silence this ' + 'warning.'), stacklevel=3) diff --git a/flask/venv/lib/python3.6/site-packages/flask/globals.py b/flask/venv/lib/python3.6/site-packages/flask/globals.py new file mode 100644 index 0000000..7d50a6f --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/flask/globals.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +""" + flask.globals + ~~~~~~~~~~~~~ + + Defines all the global objects that are proxies to the current + active context. + + :copyright: © 2010 by the Pallets team. + :license: BSD, see LICENSE for more details. +""" + +from functools import partial +from werkzeug.local import LocalStack, LocalProxy + + +_request_ctx_err_msg = '''\ +Working outside of request context. + +This typically means that you attempted to use functionality that needed +an active HTTP request. Consult the documentation on testing for +information about how to avoid this problem.\ +''' +_app_ctx_err_msg = '''\ +Working outside of application context. + +This typically means that you attempted to use functionality that needed +to interface with the current application object in some way. To solve +this, set up an application context with app.app_context(). See the +documentation for more information.\ +''' + + +def _lookup_req_object(name): + top = _request_ctx_stack.top + if top is None: + raise RuntimeError(_request_ctx_err_msg) + return getattr(top, name) + + +def _lookup_app_object(name): + top = _app_ctx_stack.top + if top is None: + raise RuntimeError(_app_ctx_err_msg) + return getattr(top, name) + + +def _find_app(): + top = _app_ctx_stack.top + if top is None: + raise RuntimeError(_app_ctx_err_msg) + return top.app + + +# context locals +_request_ctx_stack = LocalStack() +_app_ctx_stack = LocalStack() +current_app = LocalProxy(_find_app) +request = LocalProxy(partial(_lookup_req_object, 'request')) +session = LocalProxy(partial(_lookup_req_object, 'session')) +g = LocalProxy(partial(_lookup_app_object, 'g')) diff --git a/flask/venv/lib/python3.6/site-packages/flask/helpers.py b/flask/venv/lib/python3.6/site-packages/flask/helpers.py new file mode 100644 index 0000000..df0b91f --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/flask/helpers.py @@ -0,0 +1,1044 @@ +# -*- coding: utf-8 -*- +""" + flask.helpers + ~~~~~~~~~~~~~ + + Implements various helpers. + + :copyright: © 2010 by the Pallets team. + :license: BSD, see LICENSE for more details. 
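A standalone sketch of the proxy pattern used in globals.py above: a `LocalProxy` resolves, at every access, to whatever the lookup function returns for the top of a `LocalStack`. The `Ctx` class and attribute name are illustrative::

    from functools import partial
    from werkzeug.local import LocalStack, LocalProxy

    _stack = LocalStack()

    def _lookup(name):
        top = _stack.top
        if top is None:
            raise RuntimeError('working outside of a pushed context')
        return getattr(top, name)

    current_thing = LocalProxy(partial(_lookup, 'thing'))

    class Ctx(object):
        thing = 'value bound to this context'

    _stack.push(Ctx())
    print(current_thing)    # resolved through the stack at access time
    _stack.pop()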
+""" + +import os +import socket +import sys +import pkgutil +import posixpath +import mimetypes +from time import time +from zlib import adler32 +from threading import RLock +import unicodedata +from werkzeug.routing import BuildError +from functools import update_wrapper + +from werkzeug.urls import url_quote +from werkzeug.datastructures import Headers, Range +from werkzeug.exceptions import BadRequest, NotFound, \ + RequestedRangeNotSatisfiable + +from werkzeug.wsgi import wrap_file +from jinja2 import FileSystemLoader + +from .signals import message_flashed +from .globals import session, _request_ctx_stack, _app_ctx_stack, \ + current_app, request +from ._compat import string_types, text_type, PY2 + +# sentinel +_missing = object() + + +# what separators does this operating system provide that are not a slash? +# this is used by the send_from_directory function to ensure that nobody is +# able to access files from outside the filesystem. +_os_alt_seps = list(sep for sep in [os.path.sep, os.path.altsep] + if sep not in (None, '/')) + + +def get_env(): + """Get the environment the app is running in, indicated by the + :envvar:`FLASK_ENV` environment variable. The default is + ``'production'``. + """ + return os.environ.get('FLASK_ENV') or 'production' + + +def get_debug_flag(): + """Get whether debug mode should be enabled for the app, indicated + by the :envvar:`FLASK_DEBUG` environment variable. The default is + ``True`` if :func:`.get_env` returns ``'development'``, or ``False`` + otherwise. + """ + val = os.environ.get('FLASK_DEBUG') + + if not val: + return get_env() == 'development' + + return val.lower() not in ('0', 'false', 'no') + + +def get_load_dotenv(default=True): + """Get whether the user has disabled loading dotenv files by setting + :envvar:`FLASK_SKIP_DOTENV`. The default is ``True``, load the + files. + + :param default: What to return if the env var isn't set. + """ + val = os.environ.get('FLASK_SKIP_DOTENV') + + if not val: + return default + + return val.lower() in ('0', 'false', 'no') + + +def _endpoint_from_view_func(view_func): + """Internal helper that returns the default endpoint for a given + function. This always is the function name. + """ + assert view_func is not None, 'expected view func if endpoint ' \ + 'is not provided.' + return view_func.__name__ + + +def stream_with_context(generator_or_function): + """Request contexts disappear when the response is started on the server. + This is done for efficiency reasons and to make it less likely to encounter + memory leaks with badly written WSGI middlewares. The downside is that if + you are using streamed responses, the generator cannot access request bound + information any more. + + This function however can help you keep the context around for longer:: + + from flask import stream_with_context, request, Response + + @app.route('/stream') + def streamed_response(): + @stream_with_context + def generate(): + yield 'Hello ' + yield request.args['name'] + yield '!' + return Response(generate()) + + Alternatively it can also be used around a specific generator:: + + from flask import stream_with_context, request, Response + + @app.route('/stream') + def streamed_response(): + def generate(): + yield 'Hello ' + yield request.args['name'] + yield '!' + return Response(stream_with_context(generate())) + + .. 
versionadded:: 0.9 + """ + try: + gen = iter(generator_or_function) + except TypeError: + def decorator(*args, **kwargs): + gen = generator_or_function(*args, **kwargs) + return stream_with_context(gen) + return update_wrapper(decorator, generator_or_function) + + def generator(): + ctx = _request_ctx_stack.top + if ctx is None: + raise RuntimeError('Attempted to stream with context but ' + 'there was no context in the first place to keep around.') + with ctx: + # Dummy sentinel. Has to be inside the context block or we're + # not actually keeping the context around. + yield None + + # The try/finally is here so that if someone passes a WSGI level + # iterator in we're still running the cleanup logic. Generators + # don't need that because they are closed on their destruction + # automatically. + try: + for item in gen: + yield item + finally: + if hasattr(gen, 'close'): + gen.close() + + # The trick is to start the generator. Then the code execution runs until + # the first dummy None is yielded at which point the context was already + # pushed. This item is discarded. Then when the iteration continues the + # real generator is executed. + wrapped_g = generator() + next(wrapped_g) + return wrapped_g + + +def make_response(*args): + """Sometimes it is necessary to set additional headers in a view. Because + views do not have to return response objects but can return a value that + is converted into a response object by Flask itself, it becomes tricky to + add headers to it. This function can be called instead of using a return + and you will get a response object which you can use to attach headers. + + If view looked like this and you want to add a new header:: + + def index(): + return render_template('index.html', foo=42) + + You can now do something like this:: + + def index(): + response = make_response(render_template('index.html', foo=42)) + response.headers['X-Parachutes'] = 'parachutes are cool' + return response + + This function accepts the very same arguments you can return from a + view function. This for example creates a response with a 404 error + code:: + + response = make_response(render_template('not_found.html'), 404) + + The other use case of this function is to force the return value of a + view function into a response which is helpful with view + decorators:: + + response = make_response(view_function()) + response.headers['X-Parachutes'] = 'parachutes are cool' + + Internally this function does the following things: + + - if no arguments are passed, it creates a new response argument + - if one argument is passed, :meth:`flask.Flask.make_response` + is invoked with it. + - if more than one argument is passed, the arguments are passed + to the :meth:`flask.Flask.make_response` function as tuple. + + .. versionadded:: 0.6 + """ + if not args: + return current_app.response_class() + if len(args) == 1: + args = args[0] + return current_app.make_response(args) + + +def url_for(endpoint, **values): + """Generates a URL to the given endpoint with the method provided. + + Variable arguments that are unknown to the target endpoint are appended + to the generated URL as query arguments. If the value of a query argument + is ``None``, the whole pair is skipped. In case blueprints are active + you can shortcut references to the same blueprint by prefixing the + local endpoint with a dot (``.``). + + This will reference the index function local to the current blueprint:: + + url_for('.index') + + For more information, head over to the :ref:`Quickstart `. 
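A standalone sketch of the generator-priming trick described in the comments above: advance the wrapper past a dummy sentinel so setup code runs before the first real item is requested::

    def with_eager_setup(gen):
        def wrapper():
            print('setup runs here, before the first real item')
            yield None              # dummy sentinel, discarded by next() below
            for item in gen:
                yield item

        wrapped = wrapper()
        next(wrapped)               # prime the generator past the sentinel
        return wrapped

    for item in with_eager_setup(iter(['a', 'b'])):
        print(item)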
+ + To integrate applications, :class:`Flask` has a hook to intercept URL build + errors through :attr:`Flask.url_build_error_handlers`. The `url_for` + function results in a :exc:`~werkzeug.routing.BuildError` when the current + app does not have a URL for the given endpoint and values. When it does, the + :data:`~flask.current_app` calls its :attr:`~Flask.url_build_error_handlers` if + it is not ``None``, which can return a string to use as the result of + `url_for` (instead of `url_for`'s default to raise the + :exc:`~werkzeug.routing.BuildError` exception) or re-raise the exception. + An example:: + + def external_url_handler(error, endpoint, values): + "Looks up an external URL when `url_for` cannot build a URL." + # This is an example of hooking the build_error_handler. + # Here, lookup_url is some utility function you've built + # which looks up the endpoint in some external URL registry. + url = lookup_url(endpoint, **values) + if url is None: + # External lookup did not have a URL. + # Re-raise the BuildError, in context of original traceback. + exc_type, exc_value, tb = sys.exc_info() + if exc_value is error: + raise exc_type, exc_value, tb + else: + raise error + # url_for will use this result, instead of raising BuildError. + return url + + app.url_build_error_handlers.append(external_url_handler) + + Here, `error` is the instance of :exc:`~werkzeug.routing.BuildError`, and + `endpoint` and `values` are the arguments passed into `url_for`. Note + that this is for building URLs outside the current application, and not for + handling 404 NotFound errors. + + .. versionadded:: 0.10 + The `_scheme` parameter was added. + + .. versionadded:: 0.9 + The `_anchor` and `_method` parameters were added. + + .. versionadded:: 0.9 + Calls :meth:`Flask.handle_build_error` on + :exc:`~werkzeug.routing.BuildError`. + + :param endpoint: the endpoint of the URL (name of the function) + :param values: the variable arguments of the URL rule + :param _external: if set to ``True``, an absolute URL is generated. Server + address can be changed via ``SERVER_NAME`` configuration variable which + defaults to `localhost`. + :param _scheme: a string specifying the desired URL scheme. The `_external` + parameter must be set to ``True`` or a :exc:`ValueError` is raised. The default + behavior uses the same scheme as the current request, or + ``PREFERRED_URL_SCHEME`` from the :ref:`app configuration ` if no + request context is available. As of Werkzeug 0.10, this also can be set + to an empty string to build protocol-relative URLs. + :param _anchor: if provided this is added as anchor to the URL. + :param _method: if provided this explicitly specifies an HTTP method. + """ + appctx = _app_ctx_stack.top + reqctx = _request_ctx_stack.top + + if appctx is None: + raise RuntimeError( + 'Attempted to generate a URL without the application context being' + ' pushed. This has to be executed when application context is' + ' available.' + ) + + # If request specific information is available we have some extra + # features that support "relative" URLs. + if reqctx is not None: + url_adapter = reqctx.url_adapter + blueprint_name = request.blueprint + + if endpoint[:1] == '.': + if blueprint_name is not None: + endpoint = blueprint_name + endpoint + else: + endpoint = endpoint[1:] + + external = values.pop('_external', False) + + # Otherwise go with the url adapter from the appctx and make + # the URLs external by default. 
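A sketch of URL building with only an application context pushed, as described above; it assumes `SERVER_NAME` is configured (the host name here is illustrative)::

    from flask import Flask, url_for

    app = Flask(__name__)
    app.config['SERVER_NAME'] = 'example.com'   # illustrative host name

    @app.route('/about')
    def about():
        return 'about page'

    with app.app_context():
        # without a request context URLs are external by default
        print(url_for('about'))   # http://example.com/about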
+ else: + url_adapter = appctx.url_adapter + + if url_adapter is None: + raise RuntimeError( + 'Application was not able to create a URL adapter for request' + ' independent URL generation. You might be able to fix this by' + ' setting the SERVER_NAME config variable.' + ) + + external = values.pop('_external', True) + + anchor = values.pop('_anchor', None) + method = values.pop('_method', None) + scheme = values.pop('_scheme', None) + appctx.app.inject_url_defaults(endpoint, values) + + # This is not the best way to deal with this but currently the + # underlying Werkzeug router does not support overriding the scheme on + # a per build call basis. + old_scheme = None + if scheme is not None: + if not external: + raise ValueError('When specifying _scheme, _external must be True') + old_scheme = url_adapter.url_scheme + url_adapter.url_scheme = scheme + + try: + try: + rv = url_adapter.build(endpoint, values, method=method, + force_external=external) + finally: + if old_scheme is not None: + url_adapter.url_scheme = old_scheme + except BuildError as error: + # We need to inject the values again so that the app callback can + # deal with that sort of stuff. + values['_external'] = external + values['_anchor'] = anchor + values['_method'] = method + values['_scheme'] = scheme + return appctx.app.handle_url_build_error(error, endpoint, values) + + if anchor is not None: + rv += '#' + url_quote(anchor) + return rv + + +def get_template_attribute(template_name, attribute): + """Loads a macro (or variable) a template exports. This can be used to + invoke a macro from within Python code. If you for example have a + template named :file:`_cider.html` with the following contents: + + .. sourcecode:: html+jinja + + {% macro hello(name) %}Hello {{ name }}!{% endmacro %} + + You can access this from Python code like this:: + + hello = get_template_attribute('_cider.html', 'hello') + return hello('World') + + .. versionadded:: 0.2 + + :param template_name: the name of the template + :param attribute: the name of the variable of macro to access + """ + return getattr(current_app.jinja_env.get_template(template_name).module, + attribute) + + +def flash(message, category='message'): + """Flashes a message to the next request. In order to remove the + flashed message from the session and to display it to the user, + the template has to call :func:`get_flashed_messages`. + + .. versionchanged:: 0.3 + `category` parameter added. + + :param message: the message to be flashed. + :param category: the category for the message. The following values + are recommended: ``'message'`` for any kind of message, + ``'error'`` for errors, ``'info'`` for information + messages and ``'warning'`` for warnings. However any + kind of string can be used as category. + """ + # Original implementation: + # + # session.setdefault('_flashes', []).append((category, message)) + # + # This assumed that changes made to mutable structures in the session are + # always in sync with the session object, which is not true for session + # implementations that use external storage for keeping their keys/values. + flashes = session.get('_flashes', []) + flashes.append((category, message)) + session['_flashes'] = flashes + message_flashed.send(current_app._get_current_object(), + message=message, category=category) + + +def get_flashed_messages(with_categories=False, category_filter=[]): + """Pulls all flashed messages from the session and returns them. + Further calls in the same request to the function will return + the same messages. 
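A sketch of flashing with the recommended categories and reading the messages back the way a template's `get_flashed_messages()` call would; the secret key and messages are illustrative::

    from flask import Flask, flash, get_flashed_messages

    app = Flask(__name__)
    app.secret_key = 'illustrative-dev-key'   # flashes are stored in the session

    with app.test_request_context('/'):
        flash('Profile saved', 'info')
        flash('Disk almost full', 'warning')
        print(get_flashed_messages(with_categories=True))
        # [('info', 'Profile saved'), ('warning', 'Disk almost full')]
        print(get_flashed_messages(category_filter=['warning']))
        # ['Disk almost full']  -- same request, so the same messages again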
By default just the messages are returned, + but when `with_categories` is set to ``True``, the return value will + be a list of tuples in the form ``(category, message)`` instead. + + Filter the flashed messages to one or more categories by providing those + categories in `category_filter`. This allows rendering categories in + separate html blocks. The `with_categories` and `category_filter` + arguments are distinct: + + * `with_categories` controls whether categories are returned with message + text (``True`` gives a tuple, where ``False`` gives just the message text). + * `category_filter` filters the messages down to only those matching the + provided categories. + + See :ref:`message-flashing-pattern` for examples. + + .. versionchanged:: 0.3 + `with_categories` parameter added. + + .. versionchanged:: 0.9 + `category_filter` parameter added. + + :param with_categories: set to ``True`` to also receive categories. + :param category_filter: whitelist of categories to limit return values + """ + flashes = _request_ctx_stack.top.flashes + if flashes is None: + _request_ctx_stack.top.flashes = flashes = session.pop('_flashes') \ + if '_flashes' in session else [] + if category_filter: + flashes = list(filter(lambda f: f[0] in category_filter, flashes)) + if not with_categories: + return [x[1] for x in flashes] + return flashes + + +def send_file(filename_or_fp, mimetype=None, as_attachment=False, + attachment_filename=None, add_etags=True, + cache_timeout=None, conditional=False, last_modified=None): + """Sends the contents of a file to the client. This will use the + most efficient method available and configured. By default it will + try to use the WSGI server's file_wrapper support. Alternatively + you can set the application's :attr:`~Flask.use_x_sendfile` attribute + to ``True`` to directly emit an ``X-Sendfile`` header. This however + requires support of the underlying webserver for ``X-Sendfile``. + + By default it will try to guess the mimetype for you, but you can + also explicitly provide one. For extra security you probably want + to send certain files as attachment (HTML for instance). The mimetype + guessing requires a `filename` or an `attachment_filename` to be + provided. + + ETags will also be attached automatically if a `filename` is provided. You + can turn this off by setting `add_etags=False`. + + If `conditional=True` and `filename` is provided, this method will try to + upgrade the response stream to support range requests. This will allow + the request to be answered with partial content response. + + Please never pass filenames to this function from user sources; + you should use :func:`send_from_directory` instead. + + .. versionadded:: 0.2 + + .. versionadded:: 0.5 + The `add_etags`, `cache_timeout` and `conditional` parameters were + added. The default behavior is now to attach etags. + + .. versionchanged:: 0.7 + mimetype guessing and etag support for file objects was + deprecated because it was unreliable. Pass a filename if you are + able to, otherwise attach an etag yourself. This functionality + will be removed in Flask 1.0 + + .. versionchanged:: 0.9 + cache_timeout pulls its default from application config, when None. + + .. versionchanged:: 0.12 + The filename is no longer automatically inferred from file objects. If + you want to use automatic mimetype and etag support, pass a filepath via + `filename_or_fp` or `attachment_filename`. + + .. versionchanged:: 0.12 + The `attachment_filename` is preferred over `filename` for MIME-type + detection. + + .. 
versionchanged:: 1.0 + UTF-8 filenames, as specified in `RFC 2231`_, are supported. + + .. _RFC 2231: https://tools.ietf.org/html/rfc2231#section-4 + + :param filename_or_fp: the filename of the file to send. + This is relative to the :attr:`~Flask.root_path` + if a relative path is specified. + Alternatively a file object might be provided in + which case ``X-Sendfile`` might not work and fall + back to the traditional method. Make sure that the + file pointer is positioned at the start of data to + send before calling :func:`send_file`. + :param mimetype: the mimetype of the file if provided. If a file path is + given, auto detection happens as fallback, otherwise an + error will be raised. + :param as_attachment: set to ``True`` if you want to send this file with + a ``Content-Disposition: attachment`` header. + :param attachment_filename: the filename for the attachment if it + differs from the file's filename. + :param add_etags: set to ``False`` to disable attaching of etags. + :param conditional: set to ``True`` to enable conditional responses. + + :param cache_timeout: the timeout in seconds for the headers. When ``None`` + (default), this value is set by + :meth:`~Flask.get_send_file_max_age` of + :data:`~flask.current_app`. + :param last_modified: set the ``Last-Modified`` header to this value, + a :class:`~datetime.datetime` or timestamp. + If a file was passed, this overrides its mtime. + """ + mtime = None + fsize = None + if isinstance(filename_or_fp, string_types): + filename = filename_or_fp + if not os.path.isabs(filename): + filename = os.path.join(current_app.root_path, filename) + file = None + if attachment_filename is None: + attachment_filename = os.path.basename(filename) + else: + file = filename_or_fp + filename = None + + if mimetype is None: + if attachment_filename is not None: + mimetype = mimetypes.guess_type(attachment_filename)[0] \ + or 'application/octet-stream' + + if mimetype is None: + raise ValueError( + 'Unable to infer MIME-type because no filename is available. ' + 'Please set either `attachment_filename`, pass a filepath to ' + '`filename_or_fp` or set your own MIME-type via `mimetype`.' 
+ ) + + headers = Headers() + if as_attachment: + if attachment_filename is None: + raise TypeError('filename unavailable, required for ' + 'sending as attachment') + + try: + attachment_filename = attachment_filename.encode('latin-1') + except UnicodeEncodeError: + filenames = { + 'filename': unicodedata.normalize( + 'NFKD', attachment_filename).encode('latin-1', 'ignore'), + 'filename*': "UTF-8''%s" % url_quote(attachment_filename), + } + else: + filenames = {'filename': attachment_filename} + + headers.add('Content-Disposition', 'attachment', **filenames) + + if current_app.use_x_sendfile and filename: + if file is not None: + file.close() + headers['X-Sendfile'] = filename + fsize = os.path.getsize(filename) + headers['Content-Length'] = fsize + data = None + else: + if file is None: + file = open(filename, 'rb') + mtime = os.path.getmtime(filename) + fsize = os.path.getsize(filename) + headers['Content-Length'] = fsize + data = wrap_file(request.environ, file) + + rv = current_app.response_class(data, mimetype=mimetype, headers=headers, + direct_passthrough=True) + + if last_modified is not None: + rv.last_modified = last_modified + elif mtime is not None: + rv.last_modified = mtime + + rv.cache_control.public = True + if cache_timeout is None: + cache_timeout = current_app.get_send_file_max_age(filename) + if cache_timeout is not None: + rv.cache_control.max_age = cache_timeout + rv.expires = int(time() + cache_timeout) + + if add_etags and filename is not None: + from warnings import warn + + try: + rv.set_etag('%s-%s-%s' % ( + os.path.getmtime(filename), + os.path.getsize(filename), + adler32( + filename.encode('utf-8') if isinstance(filename, text_type) + else filename + ) & 0xffffffff + )) + except OSError: + warn('Access %s failed, maybe it does not exist, so ignore etags in ' + 'headers' % filename, stacklevel=2) + + if conditional: + try: + rv = rv.make_conditional(request, accept_ranges=True, + complete_length=fsize) + except RequestedRangeNotSatisfiable: + if file is not None: + file.close() + raise + # make sure we don't send x-sendfile for servers that + # ignore the 304 status code for x-sendfile. + if rv.status_code == 304: + rv.headers.pop('x-sendfile', None) + return rv + + +def safe_join(directory, *pathnames): + """Safely join `directory` and zero or more untrusted `pathnames` + components. + + Example usage:: + + @app.route('/wiki/') + def wiki_page(filename): + filename = safe_join(app.config['WIKI_FOLDER'], filename) + with open(filename, 'rb') as fd: + content = fd.read() # Read and process the file content... + + :param directory: the trusted base directory. + :param pathnames: the untrusted pathnames relative to that directory. + :raises: :class:`~werkzeug.exceptions.NotFound` if one or more passed + paths fall out of its boundaries. + """ + + parts = [directory] + + for filename in pathnames: + if filename != '': + filename = posixpath.normpath(filename) + + if ( + any(sep in filename for sep in _os_alt_seps) + or os.path.isabs(filename) + or filename == '..' + or filename.startswith('../') + ): + raise NotFound() + + parts.append(filename) + + return posixpath.join(*parts) + + +def send_from_directory(directory, filename, **options): + """Send a file from a given directory with :func:`send_file`. This + is a secure way to quickly expose static files from an upload folder + or something similar. 
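A short sketch of `send_file` as defined above, sending a download with conditional/range support; it assumes a hypothetical `report.pdf` in the application root::

    from flask import Flask, send_file

    app = Flask(__name__)

    @app.route('/report')
    def report():
        # 'report.pdf' and the attachment name are illustrative
        return send_file('report.pdf', as_attachment=True,
                         attachment_filename='monthly-report.pdf',
                         conditional=True)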
+ + Example usage:: + + @app.route('/uploads/') + def download_file(filename): + return send_from_directory(app.config['UPLOAD_FOLDER'], + filename, as_attachment=True) + + .. admonition:: Sending files and Performance + + It is strongly recommended to activate either ``X-Sendfile`` support in + your webserver or (if no authentication happens) to tell the webserver + to serve files for the given path on its own without calling into the + web application for improved performance. + + .. versionadded:: 0.5 + + :param directory: the directory where all the files are stored. + :param filename: the filename relative to that directory to + download. + :param options: optional keyword arguments that are directly + forwarded to :func:`send_file`. + """ + filename = safe_join(directory, filename) + if not os.path.isabs(filename): + filename = os.path.join(current_app.root_path, filename) + try: + if not os.path.isfile(filename): + raise NotFound() + except (TypeError, ValueError): + raise BadRequest() + options.setdefault('conditional', True) + return send_file(filename, **options) + + +def get_root_path(import_name): + """Returns the path to a package or cwd if that cannot be found. This + returns the path of a package or the folder that contains a module. + + Not to be confused with the package path returned by :func:`find_package`. + """ + # Module already imported and has a file attribute. Use that first. + mod = sys.modules.get(import_name) + if mod is not None and hasattr(mod, '__file__'): + return os.path.dirname(os.path.abspath(mod.__file__)) + + # Next attempt: check the loader. + loader = pkgutil.get_loader(import_name) + + # Loader does not exist or we're referring to an unloaded main module + # or a main module without path (interactive sessions), go with the + # current working directory. + if loader is None or import_name == '__main__': + return os.getcwd() + + # For .egg, zipimporter does not have get_filename until Python 2.7. + # Some other loaders might exhibit the same behavior. + if hasattr(loader, 'get_filename'): + filepath = loader.get_filename(import_name) + else: + # Fall back to imports. + __import__(import_name) + mod = sys.modules[import_name] + filepath = getattr(mod, '__file__', None) + + # If we don't have a filepath it might be because we are a + # namespace package. In this case we pick the root path from the + # first module that is contained in our package. + if filepath is None: + raise RuntimeError('No root path can be found for the provided ' + 'module "%s". This can happen because the ' + 'module came from an import hook that does ' + 'not provide file name information or because ' + 'it\'s a namespace package. In this case ' + 'the root path needs to be explicitly ' + 'provided.' % import_name) + + # filepath is import_name.py for a module, or __init__.py for a package. + return os.path.dirname(os.path.abspath(filepath)) + + +def _matching_loader_thinks_module_is_package(loader, mod_name): + """Given the loader that loaded a module and the module this function + attempts to figure out if the given module is actually a package. + """ + # If the loader can tell us if something is a package, we can + # directly ask the loader. + if hasattr(loader, 'is_package'): + return loader.is_package(mod_name) + # importlib's namespace loaders do not have this functionality but + # all the modules it loads are packages, so we can take advantage of + # this information. 
+ elif (loader.__class__.__module__ == '_frozen_importlib' and + loader.__class__.__name__ == 'NamespaceLoader'): + return True + # Otherwise we need to fail with an error that explains what went + # wrong. + raise AttributeError( + ('%s.is_package() method is missing but is required by Flask of ' + 'PEP 302 import hooks. If you do not use import hooks and ' + 'you encounter this error please file a bug against Flask.') % + loader.__class__.__name__) + + +def find_package(import_name): + """Finds a package and returns the prefix (or None if the package is + not installed) as well as the folder that contains the package or + module as a tuple. The package path returned is the module that would + have to be added to the pythonpath in order to make it possible to + import the module. The prefix is the path below which a UNIX like + folder structure exists (lib, share etc.). + """ + root_mod_name = import_name.split('.')[0] + loader = pkgutil.get_loader(root_mod_name) + if loader is None or import_name == '__main__': + # import name is not found, or interactive/main module + package_path = os.getcwd() + else: + # For .egg, zipimporter does not have get_filename until Python 2.7. + if hasattr(loader, 'get_filename'): + filename = loader.get_filename(root_mod_name) + elif hasattr(loader, 'archive'): + # zipimporter's loader.archive points to the .egg or .zip + # archive filename is dropped in call to dirname below. + filename = loader.archive + else: + # At least one loader is missing both get_filename and archive: + # Google App Engine's HardenedModulesHook + # + # Fall back to imports. + __import__(import_name) + filename = sys.modules[import_name].__file__ + package_path = os.path.abspath(os.path.dirname(filename)) + + # In case the root module is a package we need to chop of the + # rightmost part. This needs to go through a helper function + # because of python 3.3 namespace packages. + if _matching_loader_thinks_module_is_package( + loader, root_mod_name): + package_path = os.path.dirname(package_path) + + site_parent, site_folder = os.path.split(package_path) + py_prefix = os.path.abspath(sys.prefix) + if package_path.startswith(py_prefix): + return py_prefix, package_path + elif site_folder.lower() == 'site-packages': + parent, folder = os.path.split(site_parent) + # Windows like installations + if folder.lower() == 'lib': + base_dir = parent + # UNIX like installations + elif os.path.basename(parent).lower() == 'lib': + base_dir = os.path.dirname(parent) + else: + base_dir = site_parent + return base_dir, package_path + return None, package_path + + +class locked_cached_property(object): + """A decorator that converts a function into a lazy property. The + function wrapped is called the first time to retrieve the result + and then that calculated result is used the next time you access + the value. Works like the one in Werkzeug but has a lock for + thread safety. + """ + + def __init__(self, func, name=None, doc=None): + self.__name__ = name or func.__name__ + self.__module__ = func.__module__ + self.__doc__ = doc or func.__doc__ + self.func = func + self.lock = RLock() + + def __get__(self, obj, type=None): + if obj is None: + return self + with self.lock: + value = obj.__dict__.get(self.__name__, _missing) + if value is _missing: + value = self.func(obj) + obj.__dict__[self.__name__] = value + return value + + +class _PackageBoundObject(object): + #: The name of the package or module that this app belongs to. Do not + #: change this once it is set by the constructor. 
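A minimal sketch of `locked_cached_property` from above on an illustrative class: the wrapped function runs once and the result is then served from the instance `__dict__`::

    from flask.helpers import locked_cached_property

    class Service(object):
        @locked_cached_property
        def expensive(self):
            print('computed once')
            return 42

    s = Service()
    print(s.expensive)   # triggers the computation
    print(s.expensive)   # cached; the function is not called again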
+ import_name = None + + #: Location of the template files to be added to the template lookup. + #: ``None`` if templates should not be added. + template_folder = None + + #: Absolute path to the package on the filesystem. Used to look up + #: resources contained in the package. + root_path = None + + def __init__(self, import_name, template_folder=None, root_path=None): + self.import_name = import_name + self.template_folder = template_folder + + if root_path is None: + root_path = get_root_path(self.import_name) + + self.root_path = root_path + self._static_folder = None + self._static_url_path = None + + def _get_static_folder(self): + if self._static_folder is not None: + return os.path.join(self.root_path, self._static_folder) + + def _set_static_folder(self, value): + self._static_folder = value + + static_folder = property( + _get_static_folder, _set_static_folder, + doc='The absolute path to the configured static folder.' + ) + del _get_static_folder, _set_static_folder + + def _get_static_url_path(self): + if self._static_url_path is not None: + return self._static_url_path + + if self.static_folder is not None: + return '/' + os.path.basename(self.static_folder) + + def _set_static_url_path(self, value): + self._static_url_path = value + + static_url_path = property( + _get_static_url_path, _set_static_url_path, + doc='The URL prefix that the static route will be registered for.' + ) + del _get_static_url_path, _set_static_url_path + + @property + def has_static_folder(self): + """This is ``True`` if the package bound object's container has a + folder for static files. + + .. versionadded:: 0.5 + """ + return self.static_folder is not None + + @locked_cached_property + def jinja_loader(self): + """The Jinja loader for this package bound object. + + .. versionadded:: 0.5 + """ + if self.template_folder is not None: + return FileSystemLoader(os.path.join(self.root_path, + self.template_folder)) + + def get_send_file_max_age(self, filename): + """Provides default cache_timeout for the :func:`send_file` functions. + + By default, this function returns ``SEND_FILE_MAX_AGE_DEFAULT`` from + the configuration of :data:`~flask.current_app`. + + Static file functions such as :func:`send_from_directory` use this + function, and :func:`send_file` calls this function on + :data:`~flask.current_app` when the given cache_timeout is ``None``. If a + cache_timeout is given in :func:`send_file`, that timeout is used; + otherwise, this method is called. + + This allows subclasses to change the behavior when sending files based + on the filename. For example, to set the cache timeout for .js files + to 60 seconds:: + + class MyFlask(flask.Flask): + def get_send_file_max_age(self, name): + if name.lower().endswith('.js'): + return 60 + return flask.Flask.get_send_file_max_age(self, name) + + .. versionadded:: 0.9 + """ + return total_seconds(current_app.send_file_max_age_default) + + def send_static_file(self, filename): + """Function used internally to send static files from the static + folder to the browser. + + .. versionadded:: 0.5 + """ + if not self.has_static_folder: + raise RuntimeError('No static folder for this object') + # Ensure get_send_file_max_age is called in all cases. + # Here, we ensure get_send_file_max_age is called for Blueprints. 
+ cache_timeout = self.get_send_file_max_age(filename) + return send_from_directory(self.static_folder, filename, + cache_timeout=cache_timeout) + + def open_resource(self, resource, mode='rb'): + """Opens a resource from the application's resource folder. To see + how this works, consider the following folder structure:: + + /myapplication.py + /schema.sql + /static + /style.css + /templates + /layout.html + /index.html + + If you want to open the :file:`schema.sql` file you would do the + following:: + + with app.open_resource('schema.sql') as f: + contents = f.read() + do_something_with(contents) + + :param resource: the name of the resource. To access resources within + subfolders use forward slashes as separator. + :param mode: resource file opening mode, default is 'rb'. + """ + if mode not in ('r', 'rb'): + raise ValueError('Resources can only be opened for reading') + return open(os.path.join(self.root_path, resource), mode) + + +def total_seconds(td): + """Returns the total seconds from a timedelta object. + + :param timedelta td: the timedelta to be converted in seconds + + :returns: number of seconds + :rtype: int + """ + return td.days * 60 * 60 * 24 + td.seconds + + +def is_ip(value): + """Determine if the given string is an IP address. + + Python 2 on Windows doesn't provide ``inet_pton``, so this only + checks IPv4 addresses in that environment. + + :param value: value to check + :type value: str + + :return: True if string is an IP address + :rtype: bool + """ + if PY2 and os.name == 'nt': + try: + socket.inet_aton(value) + return True + except socket.error: + return False + + for family in (socket.AF_INET, socket.AF_INET6): + try: + socket.inet_pton(family, value) + except socket.error: + pass + else: + return True + + return False diff --git a/flask/venv/lib/python3.6/site-packages/flask/json/__init__.py b/flask/venv/lib/python3.6/site-packages/flask/json/__init__.py new file mode 100644 index 0000000..fbe6b92 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/flask/json/__init__.py @@ -0,0 +1,327 @@ +# -*- coding: utf-8 -*- +""" +flask.json +~~~~~~~~~~ + +:copyright: © 2010 by the Pallets team. +:license: BSD, see LICENSE for more details. +""" +import codecs +import io +import uuid +from datetime import date, datetime +from flask.globals import current_app, request +from flask._compat import text_type, PY2 + +from werkzeug.http import http_date +from jinja2 import Markup + +# Use the same json implementation as itsdangerous on which we +# depend anyways. +from itsdangerous import json as _json + + +# Figure out if simplejson escapes slashes. This behavior was changed +# from one version to another without reason. +_slash_escape = '\\/' not in _json.dumps('/') + + +__all__ = ['dump', 'dumps', 'load', 'loads', 'htmlsafe_dump', + 'htmlsafe_dumps', 'JSONDecoder', 'JSONEncoder', + 'jsonify'] + + +def _wrap_reader_for_text(fp, encoding): + if isinstance(fp.read(0), bytes): + fp = io.TextIOWrapper(io.BufferedReader(fp), encoding) + return fp + + +def _wrap_writer_for_text(fp, encoding): + try: + fp.write('') + except TypeError: + fp = io.TextIOWrapper(fp, encoding) + return fp + + +class JSONEncoder(_json.JSONEncoder): + """The default Flask JSON encoder. This one extends the default simplejson + encoder by also supporting ``datetime`` objects, ``UUID`` as well as + ``Markup`` objects which are serialized as RFC 822 datetime strings (same + as the HTTP date format). In order to support more data types override the + :meth:`default` method. 
+ """ + + def default(self, o): + """Implement this method in a subclass such that it returns a + serializable object for ``o``, or calls the base implementation (to + raise a :exc:`TypeError`). + + For example, to support arbitrary iterators, you could implement + default like this:: + + def default(self, o): + try: + iterable = iter(o) + except TypeError: + pass + else: + return list(iterable) + return JSONEncoder.default(self, o) + """ + if isinstance(o, datetime): + return http_date(o.utctimetuple()) + if isinstance(o, date): + return http_date(o.timetuple()) + if isinstance(o, uuid.UUID): + return str(o) + if hasattr(o, '__html__'): + return text_type(o.__html__()) + return _json.JSONEncoder.default(self, o) + + +class JSONDecoder(_json.JSONDecoder): + """The default JSON decoder. This one does not change the behavior from + the default simplejson decoder. Consult the :mod:`json` documentation + for more information. This decoder is not only used for the load + functions of this module but also :attr:`~flask.Request`. + """ + + +def _dump_arg_defaults(kwargs): + """Inject default arguments for dump functions.""" + if current_app: + bp = current_app.blueprints.get(request.blueprint) if request else None + kwargs.setdefault( + 'cls', + bp.json_encoder if bp and bp.json_encoder + else current_app.json_encoder + ) + + if not current_app.config['JSON_AS_ASCII']: + kwargs.setdefault('ensure_ascii', False) + + kwargs.setdefault('sort_keys', current_app.config['JSON_SORT_KEYS']) + else: + kwargs.setdefault('sort_keys', True) + kwargs.setdefault('cls', JSONEncoder) + + +def _load_arg_defaults(kwargs): + """Inject default arguments for load functions.""" + if current_app: + bp = current_app.blueprints.get(request.blueprint) if request else None + kwargs.setdefault( + 'cls', + bp.json_decoder if bp and bp.json_decoder + else current_app.json_decoder + ) + else: + kwargs.setdefault('cls', JSONDecoder) + + +def detect_encoding(data): + """Detect which UTF codec was used to encode the given bytes. + + The latest JSON standard (:rfc:`8259`) suggests that only UTF-8 is + accepted. Older documents allowed 8, 16, or 32. 16 and 32 can be big + or little endian. Some editors or libraries may prepend a BOM. + + :param data: Bytes in unknown UTF encoding. + :return: UTF encoding name + """ + head = data[:4] + + if head[:3] == codecs.BOM_UTF8: + return 'utf-8-sig' + + if b'\x00' not in head: + return 'utf-8' + + if head in (codecs.BOM_UTF32_BE, codecs.BOM_UTF32_LE): + return 'utf-32' + + if head[:2] in (codecs.BOM_UTF16_BE, codecs.BOM_UTF16_LE): + return 'utf-16' + + if len(head) == 4: + if head[:3] == b'\x00\x00\x00': + return 'utf-32-be' + + if head[::2] == b'\x00\x00': + return 'utf-16-be' + + if head[1:] == b'\x00\x00\x00': + return 'utf-32-le' + + if head[1::2] == b'\x00\x00': + return 'utf-16-le' + + if len(head) == 2: + return 'utf-16-be' if head.startswith(b'\x00') else 'utf-16-le' + + return 'utf-8' + + +def dumps(obj, **kwargs): + """Serialize ``obj`` to a JSON formatted ``str`` by using the application's + configured encoder (:attr:`~flask.Flask.json_encoder`) if there is an + application on the stack. + + This function can return ``unicode`` strings or ascii-only bytestrings by + default which coerce into unicode strings automatically. That behavior by + default is controlled by the ``JSON_AS_ASCII`` configuration variable + and can be overridden by the simplejson ``ensure_ascii`` parameter. 
+ """ + _dump_arg_defaults(kwargs) + encoding = kwargs.pop('encoding', None) + rv = _json.dumps(obj, **kwargs) + if encoding is not None and isinstance(rv, text_type): + rv = rv.encode(encoding) + return rv + + +def dump(obj, fp, **kwargs): + """Like :func:`dumps` but writes into a file object.""" + _dump_arg_defaults(kwargs) + encoding = kwargs.pop('encoding', None) + if encoding is not None: + fp = _wrap_writer_for_text(fp, encoding) + _json.dump(obj, fp, **kwargs) + + +def loads(s, **kwargs): + """Unserialize a JSON object from a string ``s`` by using the application's + configured decoder (:attr:`~flask.Flask.json_decoder`) if there is an + application on the stack. + """ + _load_arg_defaults(kwargs) + if isinstance(s, bytes): + encoding = kwargs.pop('encoding', None) + if encoding is None: + encoding = detect_encoding(s) + s = s.decode(encoding) + return _json.loads(s, **kwargs) + + +def load(fp, **kwargs): + """Like :func:`loads` but reads from a file object. + """ + _load_arg_defaults(kwargs) + if not PY2: + fp = _wrap_reader_for_text(fp, kwargs.pop('encoding', None) or 'utf-8') + return _json.load(fp, **kwargs) + + +def htmlsafe_dumps(obj, **kwargs): + """Works exactly like :func:`dumps` but is safe for use in ``' + self.assertEqual( + r'"\u003c/script\u003e\u003cscript\u003e' + r'alert(\"gotcha\")\u003c/script\u003e"', + self.encoder.encode(bad_string)) + self.assertEqual( + bad_string, self.decoder.decode( + self.encoder.encode(bad_string))) diff --git a/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_errors.py b/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_errors.py new file mode 100644 index 0000000..d573825 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_errors.py @@ -0,0 +1,68 @@ +import sys, pickle +from unittest import TestCase + +import simplejson as json +from simplejson.compat import text_type, b + +class TestErrors(TestCase): + def test_string_keys_error(self): + data = [{'a': 'A', 'b': (2, 4), 'c': 3.0, ('d',): 'D tuple'}] + try: + json.dumps(data) + except TypeError: + err = sys.exc_info()[1] + else: + self.fail('Expected TypeError') + self.assertEqual(str(err), + 'keys must be str, int, float, bool or None, not tuple') + + def test_not_serializable(self): + try: + json.dumps(json) + except TypeError: + err = sys.exc_info()[1] + else: + self.fail('Expected TypeError') + self.assertEqual(str(err), + 'Object of type module is not JSON serializable') + + def test_decode_error(self): + err = None + try: + json.loads('{}\na\nb') + except json.JSONDecodeError: + err = sys.exc_info()[1] + else: + self.fail('Expected JSONDecodeError') + self.assertEqual(err.lineno, 2) + self.assertEqual(err.colno, 1) + self.assertEqual(err.endlineno, 3) + self.assertEqual(err.endcolno, 2) + + def test_scan_error(self): + err = None + for t in (text_type, b): + try: + json.loads(t('{"asdf": "')) + except json.JSONDecodeError: + err = sys.exc_info()[1] + else: + self.fail('Expected JSONDecodeError') + self.assertEqual(err.lineno, 1) + self.assertEqual(err.colno, 10) + + def test_error_is_pickable(self): + err = None + try: + json.loads('{}\na\nb') + except json.JSONDecodeError: + err = sys.exc_info()[1] + else: + self.fail('Expected JSONDecodeError') + s = pickle.dumps(err) + e = pickle.loads(s) + + self.assertEqual(err.msg, e.msg) + self.assertEqual(err.doc, e.doc) + self.assertEqual(err.pos, e.pos) + self.assertEqual(err.end, e.end) diff --git a/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_fail.py 
b/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_fail.py new file mode 100644 index 0000000..788f3a5 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_fail.py @@ -0,0 +1,176 @@ +import sys +from unittest import TestCase + +import simplejson as json + +# 2007-10-05 +JSONDOCS = [ + # http://json.org/JSON_checker/test/fail1.json + '"A JSON payload should be an object or array, not a string."', + # http://json.org/JSON_checker/test/fail2.json + '["Unclosed array"', + # http://json.org/JSON_checker/test/fail3.json + '{unquoted_key: "keys must be quoted"}', + # http://json.org/JSON_checker/test/fail4.json + '["extra comma",]', + # http://json.org/JSON_checker/test/fail5.json + '["double extra comma",,]', + # http://json.org/JSON_checker/test/fail6.json + '[ , "<-- missing value"]', + # http://json.org/JSON_checker/test/fail7.json + '["Comma after the close"],', + # http://json.org/JSON_checker/test/fail8.json + '["Extra close"]]', + # http://json.org/JSON_checker/test/fail9.json + '{"Extra comma": true,}', + # http://json.org/JSON_checker/test/fail10.json + '{"Extra value after close": true} "misplaced quoted value"', + # http://json.org/JSON_checker/test/fail11.json + '{"Illegal expression": 1 + 2}', + # http://json.org/JSON_checker/test/fail12.json + '{"Illegal invocation": alert()}', + # http://json.org/JSON_checker/test/fail13.json + '{"Numbers cannot have leading zeroes": 013}', + # http://json.org/JSON_checker/test/fail14.json + '{"Numbers cannot be hex": 0x14}', + # http://json.org/JSON_checker/test/fail15.json + '["Illegal backslash escape: \\x15"]', + # http://json.org/JSON_checker/test/fail16.json + '[\\naked]', + # http://json.org/JSON_checker/test/fail17.json + '["Illegal backslash escape: \\017"]', + # http://json.org/JSON_checker/test/fail18.json + '[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]]', + # http://json.org/JSON_checker/test/fail19.json + '{"Missing colon" null}', + # http://json.org/JSON_checker/test/fail20.json + '{"Double colon":: null}', + # http://json.org/JSON_checker/test/fail21.json + '{"Comma instead of colon", null}', + # http://json.org/JSON_checker/test/fail22.json + '["Colon instead of comma": false]', + # http://json.org/JSON_checker/test/fail23.json + '["Bad value", truth]', + # http://json.org/JSON_checker/test/fail24.json + "['single quote']", + # http://json.org/JSON_checker/test/fail25.json + '["\ttab\tcharacter\tin\tstring\t"]', + # http://json.org/JSON_checker/test/fail26.json + '["tab\\ character\\ in\\ string\\ "]', + # http://json.org/JSON_checker/test/fail27.json + '["line\nbreak"]', + # http://json.org/JSON_checker/test/fail28.json + '["line\\\nbreak"]', + # http://json.org/JSON_checker/test/fail29.json + '[0e]', + # http://json.org/JSON_checker/test/fail30.json + '[0e+]', + # http://json.org/JSON_checker/test/fail31.json + '[0e+-1]', + # http://json.org/JSON_checker/test/fail32.json + '{"Comma instead if closing brace": true,', + # http://json.org/JSON_checker/test/fail33.json + '["mismatch"}', + # http://code.google.com/p/simplejson/issues/detail?id=3 + u'["A\u001FZ control characters in string"]', + # misc based on coverage + '{', + '{]', + '{"foo": "bar"]', + '{"foo": "bar"', + 'nul', + 'nulx', + '-', + '-x', + '-e', + '-e0', + '-Infinite', + '-Inf', + 'Infinit', + 'Infinite', + 'NaM', + 'NuN', + 'falsy', + 'fal', + 'trug', + 'tru', + '1e', + '1ex', + '1e-', + '1e-x', +] + +SKIPS = { + 1: "why not have a string payload?", + 18: "spec doesn't specify any nesting limitations", +} + +class 
TestFail(TestCase): + def test_failures(self): + for idx, doc in enumerate(JSONDOCS): + idx = idx + 1 + if idx in SKIPS: + json.loads(doc) + continue + try: + json.loads(doc) + except json.JSONDecodeError: + pass + else: + self.fail("Expected failure for fail%d.json: %r" % (idx, doc)) + + def test_array_decoder_issue46(self): + # http://code.google.com/p/simplejson/issues/detail?id=46 + for doc in [u'[,]', '[,]']: + try: + json.loads(doc) + except json.JSONDecodeError: + e = sys.exc_info()[1] + self.assertEqual(e.pos, 1) + self.assertEqual(e.lineno, 1) + self.assertEqual(e.colno, 2) + except Exception: + e = sys.exc_info()[1] + self.fail("Unexpected exception raised %r %s" % (e, e)) + else: + self.fail("Unexpected success parsing '[,]'") + + def test_truncated_input(self): + test_cases = [ + ('', 'Expecting value', 0), + ('[', "Expecting value or ']'", 1), + ('[42', "Expecting ',' delimiter", 3), + ('[42,', 'Expecting value', 4), + ('["', 'Unterminated string starting at', 1), + ('["spam', 'Unterminated string starting at', 1), + ('["spam"', "Expecting ',' delimiter", 7), + ('["spam",', 'Expecting value', 8), + ('{', 'Expecting property name enclosed in double quotes', 1), + ('{"', 'Unterminated string starting at', 1), + ('{"spam', 'Unterminated string starting at', 1), + ('{"spam"', "Expecting ':' delimiter", 7), + ('{"spam":', 'Expecting value', 8), + ('{"spam":42', "Expecting ',' delimiter", 10), + ('{"spam":42,', 'Expecting property name enclosed in double quotes', + 11), + ('"', 'Unterminated string starting at', 0), + ('"spam', 'Unterminated string starting at', 0), + ('[,', "Expecting value", 1), + ] + for data, msg, idx in test_cases: + try: + json.loads(data) + except json.JSONDecodeError: + e = sys.exc_info()[1] + self.assertEqual( + e.msg[:len(msg)], + msg, + "%r doesn't start with %r for %r" % (e.msg, msg, data)) + self.assertEqual( + e.pos, idx, + "pos %r != %r for %r" % (e.pos, idx, data)) + except Exception: + e = sys.exc_info()[1] + self.fail("Unexpected exception raised %r %s" % (e, e)) + else: + self.fail("Unexpected success parsing '%r'" % (data,)) diff --git a/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_float.py b/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_float.py new file mode 100644 index 0000000..e382ec2 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_float.py @@ -0,0 +1,35 @@ +import math +from unittest import TestCase +from simplejson.compat import long_type, text_type +import simplejson as json +from simplejson.decoder import NaN, PosInf, NegInf + +class TestFloat(TestCase): + def test_degenerates_allow(self): + for inf in (PosInf, NegInf): + self.assertEqual(json.loads(json.dumps(inf)), inf) + # Python 2.5 doesn't have math.isnan + nan = json.loads(json.dumps(NaN)) + self.assertTrue((0 + nan) != nan) + + def test_degenerates_ignore(self): + for f in (PosInf, NegInf, NaN): + self.assertEqual(json.loads(json.dumps(f, ignore_nan=True)), None) + + def test_degenerates_deny(self): + for f in (PosInf, NegInf, NaN): + self.assertRaises(ValueError, json.dumps, f, allow_nan=False) + + def test_floats(self): + for num in [1617161771.7650001, math.pi, math.pi**100, + math.pi**-100, 3.1]: + self.assertEqual(float(json.dumps(num)), num) + self.assertEqual(json.loads(json.dumps(num)), num) + self.assertEqual(json.loads(text_type(json.dumps(num))), num) + + def test_ints(self): + for num in [1, long_type(1), 1<<32, 1<<64]: + self.assertEqual(json.dumps(num), str(num)) + self.assertEqual(int(json.dumps(num)), 
num) + self.assertEqual(json.loads(json.dumps(num)), num) + self.assertEqual(json.loads(text_type(json.dumps(num))), num) diff --git a/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_for_json.py b/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_for_json.py new file mode 100644 index 0000000..b791b88 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_for_json.py @@ -0,0 +1,97 @@ +import unittest +import simplejson as json + + +class ForJson(object): + def for_json(self): + return {'for_json': 1} + + +class NestedForJson(object): + def for_json(self): + return {'nested': ForJson()} + + +class ForJsonList(object): + def for_json(self): + return ['list'] + + +class DictForJson(dict): + def for_json(self): + return {'alpha': 1} + + +class ListForJson(list): + def for_json(self): + return ['list'] + + +class TestForJson(unittest.TestCase): + def assertRoundTrip(self, obj, other, for_json=True): + if for_json is None: + # None will use the default + s = json.dumps(obj) + else: + s = json.dumps(obj, for_json=for_json) + self.assertEqual( + json.loads(s), + other) + + def test_for_json_encodes_stand_alone_object(self): + self.assertRoundTrip( + ForJson(), + ForJson().for_json()) + + def test_for_json_encodes_object_nested_in_dict(self): + self.assertRoundTrip( + {'hooray': ForJson()}, + {'hooray': ForJson().for_json()}) + + def test_for_json_encodes_object_nested_in_list_within_dict(self): + self.assertRoundTrip( + {'list': [0, ForJson(), 2, 3]}, + {'list': [0, ForJson().for_json(), 2, 3]}) + + def test_for_json_encodes_object_nested_within_object(self): + self.assertRoundTrip( + NestedForJson(), + {'nested': {'for_json': 1}}) + + def test_for_json_encodes_list(self): + self.assertRoundTrip( + ForJsonList(), + ForJsonList().for_json()) + + def test_for_json_encodes_list_within_object(self): + self.assertRoundTrip( + {'nested': ForJsonList()}, + {'nested': ForJsonList().for_json()}) + + def test_for_json_encodes_dict_subclass(self): + self.assertRoundTrip( + DictForJson(a=1), + DictForJson(a=1).for_json()) + + def test_for_json_encodes_list_subclass(self): + self.assertRoundTrip( + ListForJson(['l']), + ListForJson(['l']).for_json()) + + def test_for_json_ignored_if_not_true_with_dict_subclass(self): + for for_json in (None, False): + self.assertRoundTrip( + DictForJson(a=1), + {'a': 1}, + for_json=for_json) + + def test_for_json_ignored_if_not_true_with_list_subclass(self): + for for_json in (None, False): + self.assertRoundTrip( + ListForJson(['l']), + ['l'], + for_json=for_json) + + def test_raises_typeerror_if_for_json_not_true_with_object(self): + self.assertRaises(TypeError, json.dumps, ForJson()) + self.assertRaises(TypeError, json.dumps, ForJson(), for_json=False) diff --git a/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_indent.py b/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_indent.py new file mode 100644 index 0000000..cea25a5 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_indent.py @@ -0,0 +1,86 @@ +from unittest import TestCase +import textwrap + +import simplejson as json +from simplejson.compat import StringIO + +class TestIndent(TestCase): + def test_indent(self): + h = [['blorpie'], ['whoops'], [], 'd-shtaeou', 'd-nthiouh', + 'i-vhbjkhnth', + {'nifty': 87}, {'field': 'yes', 'morefield': False} ] + + expect = textwrap.dedent("""\ + [ + \t[ + \t\t"blorpie" + \t], + \t[ + \t\t"whoops" + \t], + \t[], + \t"d-shtaeou", + \t"d-nthiouh", + \t"i-vhbjkhnth", + \t{ + 
\t\t"nifty": 87 + \t}, + \t{ + \t\t"field": "yes", + \t\t"morefield": false + \t} + ]""") + + + d1 = json.dumps(h) + d2 = json.dumps(h, indent='\t', sort_keys=True, separators=(',', ': ')) + d3 = json.dumps(h, indent=' ', sort_keys=True, separators=(',', ': ')) + d4 = json.dumps(h, indent=2, sort_keys=True, separators=(',', ': ')) + + h1 = json.loads(d1) + h2 = json.loads(d2) + h3 = json.loads(d3) + h4 = json.loads(d4) + + self.assertEqual(h1, h) + self.assertEqual(h2, h) + self.assertEqual(h3, h) + self.assertEqual(h4, h) + self.assertEqual(d3, expect.replace('\t', ' ')) + self.assertEqual(d4, expect.replace('\t', ' ')) + # NOTE: Python 2.4 textwrap.dedent converts tabs to spaces, + # so the following is expected to fail. Python 2.4 is not a + # supported platform in simplejson 2.1.0+. + self.assertEqual(d2, expect) + + def test_indent0(self): + h = {3: 1} + def check(indent, expected): + d1 = json.dumps(h, indent=indent) + self.assertEqual(d1, expected) + + sio = StringIO() + json.dump(h, sio, indent=indent) + self.assertEqual(sio.getvalue(), expected) + + # indent=0 should emit newlines + check(0, '{\n"3": 1\n}') + # indent=None is more compact + check(None, '{"3": 1}') + + def test_separators(self): + lst = [1,2,3,4] + expect = '[\n1,\n2,\n3,\n4\n]' + expect_spaces = '[\n1, \n2, \n3, \n4\n]' + # Ensure that separators still works + self.assertEqual( + expect_spaces, + json.dumps(lst, indent=0, separators=(', ', ': '))) + # Force the new defaults + self.assertEqual( + expect, + json.dumps(lst, indent=0, separators=(',', ': '))) + # Added in 2.1.4 + self.assertEqual( + expect, + json.dumps(lst, indent=0)) diff --git a/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_item_sort_key.py b/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_item_sort_key.py new file mode 100644 index 0000000..98971b8 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_item_sort_key.py @@ -0,0 +1,27 @@ +from unittest import TestCase + +import simplejson as json +from operator import itemgetter + +class TestItemSortKey(TestCase): + def test_simple_first(self): + a = {'a': 1, 'c': 5, 'jack': 'jill', 'pick': 'axe', 'array': [1, 5, 6, 9], 'tuple': (83, 12, 3), 'crate': 'dog', 'zeak': 'oh'} + self.assertEqual( + '{"a": 1, "c": 5, "crate": "dog", "jack": "jill", "pick": "axe", "zeak": "oh", "array": [1, 5, 6, 9], "tuple": [83, 12, 3]}', + json.dumps(a, item_sort_key=json.simple_first)) + + def test_case(self): + a = {'a': 1, 'c': 5, 'Jack': 'jill', 'pick': 'axe', 'Array': [1, 5, 6, 9], 'tuple': (83, 12, 3), 'crate': 'dog', 'zeak': 'oh'} + self.assertEqual( + '{"Array": [1, 5, 6, 9], "Jack": "jill", "a": 1, "c": 5, "crate": "dog", "pick": "axe", "tuple": [83, 12, 3], "zeak": "oh"}', + json.dumps(a, item_sort_key=itemgetter(0))) + self.assertEqual( + '{"a": 1, "Array": [1, 5, 6, 9], "c": 5, "crate": "dog", "Jack": "jill", "pick": "axe", "tuple": [83, 12, 3], "zeak": "oh"}', + json.dumps(a, item_sort_key=lambda kv: kv[0].lower())) + + def test_item_sort_key_value(self): + # https://github.com/simplejson/simplejson/issues/173 + a = {'a': 1, 'b': 0} + self.assertEqual( + '{"b": 0, "a": 1}', + json.dumps(a, item_sort_key=lambda kv: kv[1])) diff --git a/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_iterable.py b/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_iterable.py new file mode 100644 index 0000000..35d3e75 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_iterable.py @@ -0,0 +1,31 @@ +import unittest +from 
simplejson.compat import StringIO + +import simplejson as json + +def iter_dumps(obj, **kw): + return ''.join(json.JSONEncoder(**kw).iterencode(obj)) + +def sio_dump(obj, **kw): + sio = StringIO() + json.dumps(obj, **kw) + return sio.getvalue() + +class TestIterable(unittest.TestCase): + def test_iterable(self): + for l in ([], [1], [1, 2], [1, 2, 3]): + for opts in [{}, {'indent': 2}]: + for dumps in (json.dumps, iter_dumps, sio_dump): + expect = dumps(l, **opts) + default_expect = dumps(sum(l), **opts) + # Default is False + self.assertRaises(TypeError, dumps, iter(l), **opts) + self.assertRaises(TypeError, dumps, iter(l), iterable_as_array=False, **opts) + self.assertEqual(expect, dumps(iter(l), iterable_as_array=True, **opts)) + # Ensure that the "default" gets called + self.assertEqual(default_expect, dumps(iter(l), default=sum, **opts)) + self.assertEqual(default_expect, dumps(iter(l), iterable_as_array=False, default=sum, **opts)) + # Ensure that the "default" does not get called + self.assertEqual( + expect, + dumps(iter(l), iterable_as_array=True, default=sum, **opts)) diff --git a/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_namedtuple.py b/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_namedtuple.py new file mode 100644 index 0000000..4387894 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_namedtuple.py @@ -0,0 +1,122 @@ +from __future__ import absolute_import +import unittest +import simplejson as json +from simplejson.compat import StringIO + +try: + from collections import namedtuple +except ImportError: + class Value(tuple): + def __new__(cls, *args): + return tuple.__new__(cls, args) + + def _asdict(self): + return {'value': self[0]} + class Point(tuple): + def __new__(cls, *args): + return tuple.__new__(cls, args) + + def _asdict(self): + return {'x': self[0], 'y': self[1]} +else: + Value = namedtuple('Value', ['value']) + Point = namedtuple('Point', ['x', 'y']) + +class DuckValue(object): + def __init__(self, *args): + self.value = Value(*args) + + def _asdict(self): + return self.value._asdict() + +class DuckPoint(object): + def __init__(self, *args): + self.point = Point(*args) + + def _asdict(self): + return self.point._asdict() + +class DeadDuck(object): + _asdict = None + +class DeadDict(dict): + _asdict = None + +CONSTRUCTORS = [ + lambda v: v, + lambda v: [v], + lambda v: [{'key': v}], +] + +class TestNamedTuple(unittest.TestCase): + def test_namedtuple_dumps(self): + for v in [Value(1), Point(1, 2), DuckValue(1), DuckPoint(1, 2)]: + d = v._asdict() + self.assertEqual(d, json.loads(json.dumps(v))) + self.assertEqual( + d, + json.loads(json.dumps(v, namedtuple_as_object=True))) + self.assertEqual(d, json.loads(json.dumps(v, tuple_as_array=False))) + self.assertEqual( + d, + json.loads(json.dumps(v, namedtuple_as_object=True, + tuple_as_array=False))) + + def test_namedtuple_dumps_false(self): + for v in [Value(1), Point(1, 2)]: + l = list(v) + self.assertEqual( + l, + json.loads(json.dumps(v, namedtuple_as_object=False))) + self.assertRaises(TypeError, json.dumps, v, + tuple_as_array=False, namedtuple_as_object=False) + + def test_namedtuple_dump(self): + for v in [Value(1), Point(1, 2), DuckValue(1), DuckPoint(1, 2)]: + d = v._asdict() + sio = StringIO() + json.dump(v, sio) + self.assertEqual(d, json.loads(sio.getvalue())) + sio = StringIO() + json.dump(v, sio, namedtuple_as_object=True) + self.assertEqual( + d, + json.loads(sio.getvalue())) + sio = StringIO() + json.dump(v, sio, tuple_as_array=False) + 
self.assertEqual(d, json.loads(sio.getvalue())) + sio = StringIO() + json.dump(v, sio, namedtuple_as_object=True, + tuple_as_array=False) + self.assertEqual( + d, + json.loads(sio.getvalue())) + + def test_namedtuple_dump_false(self): + for v in [Value(1), Point(1, 2)]: + l = list(v) + sio = StringIO() + json.dump(v, sio, namedtuple_as_object=False) + self.assertEqual( + l, + json.loads(sio.getvalue())) + self.assertRaises(TypeError, json.dump, v, StringIO(), + tuple_as_array=False, namedtuple_as_object=False) + + def test_asdict_not_callable_dump(self): + for f in CONSTRUCTORS: + self.assertRaises(TypeError, + json.dump, f(DeadDuck()), StringIO(), namedtuple_as_object=True) + sio = StringIO() + json.dump(f(DeadDict()), sio, namedtuple_as_object=True) + self.assertEqual( + json.dumps(f({})), + sio.getvalue()) + + def test_asdict_not_callable_dumps(self): + for f in CONSTRUCTORS: + self.assertRaises(TypeError, + json.dumps, f(DeadDuck()), namedtuple_as_object=True) + self.assertEqual( + json.dumps(f({})), + json.dumps(f(DeadDict()), namedtuple_as_object=True)) diff --git a/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_pass1.py b/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_pass1.py new file mode 100644 index 0000000..f0b5b10 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_pass1.py @@ -0,0 +1,71 @@ +from unittest import TestCase + +import simplejson as json + +# from http://json.org/JSON_checker/test/pass1.json +JSON = r''' +[ + "JSON Test Pattern pass1", + {"object with 1 member":["array with 1 element"]}, + {}, + [], + -42, + true, + false, + null, + { + "integer": 1234567890, + "real": -9876.543210, + "e": 0.123456789e-12, + "E": 1.234567890E+34, + "": 23456789012E66, + "zero": 0, + "one": 1, + "space": " ", + "quote": "\"", + "backslash": "\\", + "controls": "\b\f\n\r\t", + "slash": "/ & \/", + "alpha": "abcdefghijklmnopqrstuvwyz", + "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", + "digit": "0123456789", + "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", + "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", + "true": true, + "false": false, + "null": null, + "array":[ ], + "object":{ }, + "address": "50 St. James Street", + "url": "http://www.JSON.org/", + "comment": "// /* */": " ", + " s p a c e d " :[1,2 , 3 + +, + +4 , 5 , 6 ,7 ],"compact": [1,2,3,4,5,6,7], + "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", + "quotes": "" \u0022 %22 0x22 034 "", + "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" 
+: "A key can be any string" + }, + 0.5 ,98.6 +, +99.44 +, + +1066, +1e1, +0.1e1, +1e-1, +1e00,2e+00,2e-00 +,"rosebud"] +''' + +class TestPass1(TestCase): + def test_parse(self): + # test in/out equivalence and parsing + res = json.loads(JSON) + out = json.dumps(res) + self.assertEqual(res, json.loads(out)) diff --git a/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_pass2.py b/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_pass2.py new file mode 100644 index 0000000..5d812b3 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_pass2.py @@ -0,0 +1,14 @@ +from unittest import TestCase +import simplejson as json + +# from http://json.org/JSON_checker/test/pass2.json +JSON = r''' +[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] +''' + +class TestPass2(TestCase): + def test_parse(self): + # test in/out equivalence and parsing + res = json.loads(JSON) + out = json.dumps(res) + self.assertEqual(res, json.loads(out)) diff --git a/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_pass3.py b/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_pass3.py new file mode 100644 index 0000000..821d60b --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_pass3.py @@ -0,0 +1,20 @@ +from unittest import TestCase + +import simplejson as json + +# from http://json.org/JSON_checker/test/pass3.json +JSON = r''' +{ + "JSON Test Pattern pass3": { + "The outermost value": "must be an object or array.", + "In this test": "It is an object." + } +} +''' + +class TestPass3(TestCase): + def test_parse(self): + # test in/out equivalence and parsing + res = json.loads(JSON) + out = json.dumps(res) + self.assertEqual(res, json.loads(out)) diff --git a/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_raw_json.py b/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_raw_json.py new file mode 100644 index 0000000..1dfcc2c --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_raw_json.py @@ -0,0 +1,47 @@ +import unittest +import simplejson as json + +dct1 = { + 'key1': 'value1' +} + +dct2 = { + 'key2': 'value2', + 'd1': dct1 +} + +dct3 = { + 'key2': 'value2', + 'd1': json.dumps(dct1) +} + +dct4 = { + 'key2': 'value2', + 'd1': json.RawJSON(json.dumps(dct1)) +} + + +class TestRawJson(unittest.TestCase): + + def test_normal_str(self): + self.assertNotEqual(json.dumps(dct2), json.dumps(dct3)) + + def test_raw_json_str(self): + self.assertEqual(json.dumps(dct2), json.dumps(dct4)) + self.assertEqual(dct2, json.loads(json.dumps(dct4))) + + def test_list(self): + self.assertEqual( + json.dumps([dct2]), + json.dumps([json.RawJSON(json.dumps(dct2))])) + self.assertEqual( + [dct2], + json.loads(json.dumps([json.RawJSON(json.dumps(dct2))]))) + + def test_direct(self): + self.assertEqual( + json.dumps(dct2), + json.dumps(json.RawJSON(json.dumps(dct2)))) + self.assertEqual( + dct2, + json.loads(json.dumps(json.RawJSON(json.dumps(dct2))))) diff --git a/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_recursion.py b/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_recursion.py new file mode 100644 index 0000000..662eb66 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_recursion.py @@ -0,0 +1,67 @@ +from unittest import TestCase + +import simplejson as json + +class JSONTestObject: + pass + + +class RecursiveJSONEncoder(json.JSONEncoder): + recurse = False + def default(self, o): + if o is JSONTestObject: + if self.recurse: + return 
[JSONTestObject] + else: + return 'JSONTestObject' + return json.JSONEncoder.default(o) + + +class TestRecursion(TestCase): + def test_listrecursion(self): + x = [] + x.append(x) + try: + json.dumps(x) + except ValueError: + pass + else: + self.fail("didn't raise ValueError on list recursion") + x = [] + y = [x] + x.append(y) + try: + json.dumps(x) + except ValueError: + pass + else: + self.fail("didn't raise ValueError on alternating list recursion") + y = [] + x = [y, y] + # ensure that the marker is cleared + json.dumps(x) + + def test_dictrecursion(self): + x = {} + x["test"] = x + try: + json.dumps(x) + except ValueError: + pass + else: + self.fail("didn't raise ValueError on dict recursion") + x = {} + y = {"a": x, "b": x} + # ensure that the marker is cleared + json.dumps(y) + + def test_defaultrecursion(self): + enc = RecursiveJSONEncoder() + self.assertEqual(enc.encode(JSONTestObject), '"JSONTestObject"') + enc.recurse = True + try: + enc.encode(JSONTestObject) + except ValueError: + pass + else: + self.fail("didn't raise ValueError on default recursion") diff --git a/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_scanstring.py b/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_scanstring.py new file mode 100644 index 0000000..d5de180 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_scanstring.py @@ -0,0 +1,196 @@ +import sys +from unittest import TestCase + +import simplejson as json +import simplejson.decoder +from simplejson.compat import b, PY3 + +class TestScanString(TestCase): + # The bytes type is intentionally not used in most of these tests + # under Python 3 because the decoder immediately coerces to str before + # calling scanstring. In Python 2 we are testing the code paths + # for both unicode and str. + # + # The reason this is done is because Python 3 would require + # entirely different code paths for parsing bytes and str. 
+ # + def test_py_scanstring(self): + self._test_scanstring(simplejson.decoder.py_scanstring) + + def test_c_scanstring(self): + if not simplejson.decoder.c_scanstring: + return + self._test_scanstring(simplejson.decoder.c_scanstring) + + self.assertTrue(isinstance(simplejson.decoder.c_scanstring('""', 0)[0], str)) + + def _test_scanstring(self, scanstring): + if sys.maxunicode == 65535: + self.assertEqual( + scanstring(u'"z\U0001d120x"', 1, None, True), + (u'z\U0001d120x', 6)) + else: + self.assertEqual( + scanstring(u'"z\U0001d120x"', 1, None, True), + (u'z\U0001d120x', 5)) + + self.assertEqual( + scanstring('"\\u007b"', 1, None, True), + (u'{', 8)) + + self.assertEqual( + scanstring('"A JSON payload should be an object or array, not a string."', 1, None, True), + (u'A JSON payload should be an object or array, not a string.', 60)) + + self.assertEqual( + scanstring('["Unclosed array"', 2, None, True), + (u'Unclosed array', 17)) + + self.assertEqual( + scanstring('["extra comma",]', 2, None, True), + (u'extra comma', 14)) + + self.assertEqual( + scanstring('["double extra comma",,]', 2, None, True), + (u'double extra comma', 21)) + + self.assertEqual( + scanstring('["Comma after the close"],', 2, None, True), + (u'Comma after the close', 24)) + + self.assertEqual( + scanstring('["Extra close"]]', 2, None, True), + (u'Extra close', 14)) + + self.assertEqual( + scanstring('{"Extra comma": true,}', 2, None, True), + (u'Extra comma', 14)) + + self.assertEqual( + scanstring('{"Extra value after close": true} "misplaced quoted value"', 2, None, True), + (u'Extra value after close', 26)) + + self.assertEqual( + scanstring('{"Illegal expression": 1 + 2}', 2, None, True), + (u'Illegal expression', 21)) + + self.assertEqual( + scanstring('{"Illegal invocation": alert()}', 2, None, True), + (u'Illegal invocation', 21)) + + self.assertEqual( + scanstring('{"Numbers cannot have leading zeroes": 013}', 2, None, True), + (u'Numbers cannot have leading zeroes', 37)) + + self.assertEqual( + scanstring('{"Numbers cannot be hex": 0x14}', 2, None, True), + (u'Numbers cannot be hex', 24)) + + self.assertEqual( + scanstring('[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]]', 21, None, True), + (u'Too deep', 30)) + + self.assertEqual( + scanstring('{"Missing colon" null}', 2, None, True), + (u'Missing colon', 16)) + + self.assertEqual( + scanstring('{"Double colon":: null}', 2, None, True), + (u'Double colon', 15)) + + self.assertEqual( + scanstring('{"Comma instead of colon", null}', 2, None, True), + (u'Comma instead of colon', 25)) + + self.assertEqual( + scanstring('["Colon instead of comma": false]', 2, None, True), + (u'Colon instead of comma', 25)) + + self.assertEqual( + scanstring('["Bad value", truth]', 2, None, True), + (u'Bad value', 12)) + + for c in map(chr, range(0x00, 0x1f)): + self.assertEqual( + scanstring(c + '"', 0, None, False), + (c, 2)) + self.assertRaises( + ValueError, + scanstring, c + '"', 0, None, True) + + self.assertRaises(ValueError, scanstring, '', 0, None, True) + self.assertRaises(ValueError, scanstring, 'a', 0, None, True) + self.assertRaises(ValueError, scanstring, '\\', 0, None, True) + self.assertRaises(ValueError, scanstring, '\\u', 0, None, True) + self.assertRaises(ValueError, scanstring, '\\u0', 0, None, True) + self.assertRaises(ValueError, scanstring, '\\u01', 0, None, True) + self.assertRaises(ValueError, scanstring, '\\u012', 0, None, True) + self.assertRaises(ValueError, scanstring, '\\u0123', 0, None, True) + if sys.maxunicode > 65535: + 
self.assertRaises(ValueError, + scanstring, '\\ud834\\u"', 0, None, True) + self.assertRaises(ValueError, + scanstring, '\\ud834\\x0123"', 0, None, True) + + def test_issue3623(self): + self.assertRaises(ValueError, json.decoder.scanstring, "xxx", 1, + "xxx") + self.assertRaises(UnicodeDecodeError, + json.encoder.encode_basestring_ascii, b("xx\xff")) + + def test_overflow(self): + # Python 2.5 does not have maxsize, Python 3 does not have maxint + maxsize = getattr(sys, 'maxsize', getattr(sys, 'maxint', None)) + assert maxsize is not None + self.assertRaises(OverflowError, json.decoder.scanstring, "xxx", + maxsize + 1) + + def test_surrogates(self): + scanstring = json.decoder.scanstring + + def assertScan(given, expect, test_utf8=True): + givens = [given] + if not PY3 and test_utf8: + givens.append(given.encode('utf8')) + for given in givens: + (res, count) = scanstring(given, 1, None, True) + self.assertEqual(len(given), count) + self.assertEqual(res, expect) + + assertScan( + u'"z\\ud834\\u0079x"', + u'z\ud834yx') + assertScan( + u'"z\\ud834\\udd20x"', + u'z\U0001d120x') + assertScan( + u'"z\\ud834\\ud834\\udd20x"', + u'z\ud834\U0001d120x') + assertScan( + u'"z\\ud834x"', + u'z\ud834x') + assertScan( + u'"z\\udd20x"', + u'z\udd20x') + assertScan( + u'"z\ud834x"', + u'z\ud834x') + # It may look strange to join strings together, but Python is drunk. + # https://gist.github.com/etrepum/5538443 + assertScan( + u'"z\\ud834\udd20x12345"', + u''.join([u'z\ud834', u'\udd20x12345'])) + assertScan( + u'"z\ud834\\udd20x"', + u''.join([u'z\ud834', u'\udd20x'])) + # these have different behavior given UTF8 input, because the surrogate + # pair may be joined (in maxunicode > 65535 builds) + assertScan( + u''.join([u'"z\ud834', u'\udd20x"']), + u''.join([u'z\ud834', u'\udd20x']), + test_utf8=False) + + self.assertRaises(ValueError, + scanstring, u'"z\\ud83x"', 1, None, True) + self.assertRaises(ValueError, + scanstring, u'"z\\ud834\\udd2x"', 1, None, True) diff --git a/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_separators.py b/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_separators.py new file mode 100644 index 0000000..91b4d4f --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_separators.py @@ -0,0 +1,42 @@ +import textwrap +from unittest import TestCase + +import simplejson as json + + +class TestSeparators(TestCase): + def test_separators(self): + h = [['blorpie'], ['whoops'], [], 'd-shtaeou', 'd-nthiouh', 'i-vhbjkhnth', + {'nifty': 87}, {'field': 'yes', 'morefield': False} ] + + expect = textwrap.dedent("""\ + [ + [ + "blorpie" + ] , + [ + "whoops" + ] , + [] , + "d-shtaeou" , + "d-nthiouh" , + "i-vhbjkhnth" , + { + "nifty" : 87 + } , + { + "field" : "yes" , + "morefield" : false + } + ]""") + + + d1 = json.dumps(h) + d2 = json.dumps(h, indent=' ', sort_keys=True, separators=(' ,', ' : ')) + + h1 = json.loads(d1) + h2 = json.loads(d2) + + self.assertEqual(h1, h) + self.assertEqual(h2, h) + self.assertEqual(d2, expect) diff --git a/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_speedups.py b/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_speedups.py new file mode 100644 index 0000000..8b146df --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_speedups.py @@ -0,0 +1,114 @@ +from __future__ import with_statement + +import sys +import unittest +from unittest import TestCase + +import simplejson +from simplejson import encoder, decoder, scanner +from simplejson.compat import PY3, 
long_type, b + + +def has_speedups(): + return encoder.c_make_encoder is not None + + +def skip_if_speedups_missing(func): + def wrapper(*args, **kwargs): + if not has_speedups(): + if hasattr(unittest, 'SkipTest'): + raise unittest.SkipTest("C Extension not available") + else: + sys.stdout.write("C Extension not available") + return + return func(*args, **kwargs) + + return wrapper + + +class BadBool: + def __bool__(self): + 1/0 + __nonzero__ = __bool__ + + +class TestDecode(TestCase): + @skip_if_speedups_missing + def test_make_scanner(self): + self.assertRaises(AttributeError, scanner.c_make_scanner, 1) + + @skip_if_speedups_missing + def test_bad_bool_args(self): + def test(value): + decoder.JSONDecoder(strict=BadBool()).decode(value) + self.assertRaises(ZeroDivisionError, test, '""') + self.assertRaises(ZeroDivisionError, test, '{}') + if not PY3: + self.assertRaises(ZeroDivisionError, test, u'""') + self.assertRaises(ZeroDivisionError, test, u'{}') + +class TestEncode(TestCase): + @skip_if_speedups_missing + def test_make_encoder(self): + self.assertRaises( + TypeError, + encoder.c_make_encoder, + None, + ("\xCD\x7D\x3D\x4E\x12\x4C\xF9\x79\xD7" + "\x52\xBA\x82\xF2\x27\x4A\x7D\xA0\xCA\x75"), + None + ) + + @skip_if_speedups_missing + def test_bad_str_encoder(self): + # Issue #31505: There shouldn't be an assertion failure in case + # c_make_encoder() receives a bad encoder() argument. + import decimal + def bad_encoder1(*args): + return None + enc = encoder.c_make_encoder( + None, lambda obj: str(obj), + bad_encoder1, None, ': ', ', ', + False, False, False, {}, False, False, False, + None, None, 'utf-8', False, False, decimal.Decimal, False) + self.assertRaises(TypeError, enc, 'spam', 4) + self.assertRaises(TypeError, enc, {'spam': 42}, 4) + + def bad_encoder2(*args): + 1/0 + enc = encoder.c_make_encoder( + None, lambda obj: str(obj), + bad_encoder2, None, ': ', ', ', + False, False, False, {}, False, False, False, + None, None, 'utf-8', False, False, decimal.Decimal, False) + self.assertRaises(ZeroDivisionError, enc, 'spam', 4) + + @skip_if_speedups_missing + def test_bad_bool_args(self): + def test(name): + encoder.JSONEncoder(**{name: BadBool()}).encode({}) + self.assertRaises(ZeroDivisionError, test, 'skipkeys') + self.assertRaises(ZeroDivisionError, test, 'ensure_ascii') + self.assertRaises(ZeroDivisionError, test, 'check_circular') + self.assertRaises(ZeroDivisionError, test, 'allow_nan') + self.assertRaises(ZeroDivisionError, test, 'sort_keys') + self.assertRaises(ZeroDivisionError, test, 'use_decimal') + self.assertRaises(ZeroDivisionError, test, 'namedtuple_as_object') + self.assertRaises(ZeroDivisionError, test, 'tuple_as_array') + self.assertRaises(ZeroDivisionError, test, 'bigint_as_string') + self.assertRaises(ZeroDivisionError, test, 'for_json') + self.assertRaises(ZeroDivisionError, test, 'ignore_nan') + self.assertRaises(ZeroDivisionError, test, 'iterable_as_array') + + @skip_if_speedups_missing + def test_int_as_string_bitcount_overflow(self): + long_count = long_type(2)**32+31 + def test(): + encoder.JSONEncoder(int_as_string_bitcount=long_count).encode(0) + self.assertRaises((TypeError, OverflowError), test) + + if PY3: + @skip_if_speedups_missing + def test_bad_encoding(self): + with self.assertRaises(UnicodeEncodeError): + encoder.JSONEncoder(encoding='\udcff').encode({b('key'): 123}) diff --git a/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_str_subclass.py b/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_str_subclass.py new file 
mode 100644 index 0000000..b6c8351 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_str_subclass.py @@ -0,0 +1,21 @@ +from unittest import TestCase + +import simplejson +from simplejson.compat import text_type + +# Tests for issue demonstrated in https://github.com/simplejson/simplejson/issues/144 +class WonkyTextSubclass(text_type): + def __getslice__(self, start, end): + return self.__class__('not what you wanted!') + +class TestStrSubclass(TestCase): + def test_dump_load(self): + for s in ['', '"hello"', 'text', u'\u005c']: + self.assertEqual( + s, + simplejson.loads(simplejson.dumps(WonkyTextSubclass(s)))) + + self.assertEqual( + s, + simplejson.loads(simplejson.dumps(WonkyTextSubclass(s), + ensure_ascii=False))) diff --git a/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_subclass.py b/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_subclass.py new file mode 100644 index 0000000..2bae3b6 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_subclass.py @@ -0,0 +1,37 @@ +from unittest import TestCase +import simplejson as json + +from decimal import Decimal + +class AlternateInt(int): + def __repr__(self): + return 'invalid json' + __str__ = __repr__ + + +class AlternateFloat(float): + def __repr__(self): + return 'invalid json' + __str__ = __repr__ + + +# class AlternateDecimal(Decimal): +# def __repr__(self): +# return 'invalid json' + + +class TestSubclass(TestCase): + def test_int(self): + self.assertEqual(json.dumps(AlternateInt(1)), '1') + self.assertEqual(json.dumps(AlternateInt(-1)), '-1') + self.assertEqual(json.loads(json.dumps({AlternateInt(1): 1})), {'1': 1}) + + def test_float(self): + self.assertEqual(json.dumps(AlternateFloat(1.0)), '1.0') + self.assertEqual(json.dumps(AlternateFloat(-1.0)), '-1.0') + self.assertEqual(json.loads(json.dumps({AlternateFloat(1.0): 1})), {'1.0': 1}) + + # NOTE: Decimal subclasses are not supported as-is + # def test_decimal(self): + # self.assertEqual(json.dumps(AlternateDecimal('1.0')), '1.0') + # self.assertEqual(json.dumps(AlternateDecimal('-1.0')), '-1.0') diff --git a/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_tool.py b/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_tool.py new file mode 100644 index 0000000..914bff8 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_tool.py @@ -0,0 +1,114 @@ +from __future__ import with_statement +import os +import sys +import textwrap +import unittest +import subprocess +import tempfile +try: + # Python 3.x + from test.support import strip_python_stderr +except ImportError: + # Python 2.6+ + try: + from test.test_support import strip_python_stderr + except ImportError: + # Python 2.5 + import re + def strip_python_stderr(stderr): + return re.sub( + r"\[\d+ refs\]\r?\n?$".encode(), + "".encode(), + stderr).strip() + +def open_temp_file(): + if sys.version_info >= (2, 6): + file = tempfile.NamedTemporaryFile(delete=False) + filename = file.name + else: + fd, filename = tempfile.mkstemp() + file = os.fdopen(fd, 'w+b') + return file, filename + +class TestTool(unittest.TestCase): + data = """ + + [["blorpie"],[ "whoops" ] , [ + ],\t"d-shtaeou",\r"d-nthiouh", + "i-vhbjkhnth", {"nifty":87}, {"morefield" :\tfalse,"field" + :"yes"} ] + """ + + expect = textwrap.dedent("""\ + [ + [ + "blorpie" + ], + [ + "whoops" + ], + [], + "d-shtaeou", + "d-nthiouh", + "i-vhbjkhnth", + { + "nifty": 87 + }, + { + "field": "yes", + "morefield": false + } + ] + """) + + def 
runTool(self, args=None, data=None): + argv = [sys.executable, '-m', 'simplejson.tool'] + if args: + argv.extend(args) + proc = subprocess.Popen(argv, + stdin=subprocess.PIPE, + stderr=subprocess.PIPE, + stdout=subprocess.PIPE) + out, err = proc.communicate(data) + self.assertEqual(strip_python_stderr(err), ''.encode()) + self.assertEqual(proc.returncode, 0) + return out.decode('utf8').splitlines() + + def test_stdin_stdout(self): + self.assertEqual( + self.runTool(data=self.data.encode()), + self.expect.splitlines()) + + def test_infile_stdout(self): + infile, infile_name = open_temp_file() + try: + infile.write(self.data.encode()) + infile.close() + self.assertEqual( + self.runTool(args=[infile_name]), + self.expect.splitlines()) + finally: + os.unlink(infile_name) + + def test_infile_outfile(self): + infile, infile_name = open_temp_file() + try: + infile.write(self.data.encode()) + infile.close() + # outfile will get overwritten by tool, so the delete + # may not work on some platforms. Do it manually. + outfile, outfile_name = open_temp_file() + try: + outfile.close() + self.assertEqual( + self.runTool(args=[infile_name, outfile_name]), + []) + with open(outfile_name, 'rb') as f: + self.assertEqual( + f.read().decode('utf8').splitlines(), + self.expect.splitlines() + ) + finally: + os.unlink(outfile_name) + finally: + os.unlink(infile_name) diff --git a/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_tuple.py b/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_tuple.py new file mode 100644 index 0000000..4ad7b0e --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_tuple.py @@ -0,0 +1,47 @@ +import unittest + +from simplejson.compat import StringIO +import simplejson as json + +class TestTuples(unittest.TestCase): + def test_tuple_array_dumps(self): + t = (1, 2, 3) + expect = json.dumps(list(t)) + # Default is True + self.assertEqual(expect, json.dumps(t)) + self.assertEqual(expect, json.dumps(t, tuple_as_array=True)) + self.assertRaises(TypeError, json.dumps, t, tuple_as_array=False) + # Ensure that the "default" does not get called + self.assertEqual(expect, json.dumps(t, default=repr)) + self.assertEqual(expect, json.dumps(t, tuple_as_array=True, + default=repr)) + # Ensure that the "default" gets called + self.assertEqual( + json.dumps(repr(t)), + json.dumps(t, tuple_as_array=False, default=repr)) + + def test_tuple_array_dump(self): + t = (1, 2, 3) + expect = json.dumps(list(t)) + # Default is True + sio = StringIO() + json.dump(t, sio) + self.assertEqual(expect, sio.getvalue()) + sio = StringIO() + json.dump(t, sio, tuple_as_array=True) + self.assertEqual(expect, sio.getvalue()) + self.assertRaises(TypeError, json.dump, t, StringIO(), + tuple_as_array=False) + # Ensure that the "default" does not get called + sio = StringIO() + json.dump(t, sio, default=repr) + self.assertEqual(expect, sio.getvalue()) + sio = StringIO() + json.dump(t, sio, tuple_as_array=True, default=repr) + self.assertEqual(expect, sio.getvalue()) + # Ensure that the "default" gets called + sio = StringIO() + json.dump(t, sio, tuple_as_array=False, default=repr) + self.assertEqual( + json.dumps(repr(t)), + sio.getvalue()) diff --git a/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_unicode.py b/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_unicode.py new file mode 100644 index 0000000..0c7b1a6 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/simplejson/tests/test_unicode.py @@ -0,0 +1,154 @@ +import sys +import codecs +from 
unittest import TestCase + +import simplejson as json +from simplejson.compat import unichr, text_type, b, BytesIO + +class TestUnicode(TestCase): + def test_encoding1(self): + encoder = json.JSONEncoder(encoding='utf-8') + u = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}' + s = u.encode('utf-8') + ju = encoder.encode(u) + js = encoder.encode(s) + self.assertEqual(ju, js) + + def test_encoding2(self): + u = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}' + s = u.encode('utf-8') + ju = json.dumps(u, encoding='utf-8') + js = json.dumps(s, encoding='utf-8') + self.assertEqual(ju, js) + + def test_encoding3(self): + u = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}' + j = json.dumps(u) + self.assertEqual(j, '"\\u03b1\\u03a9"') + + def test_encoding4(self): + u = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}' + j = json.dumps([u]) + self.assertEqual(j, '["\\u03b1\\u03a9"]') + + def test_encoding5(self): + u = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}' + j = json.dumps(u, ensure_ascii=False) + self.assertEqual(j, u'"' + u + u'"') + + def test_encoding6(self): + u = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}' + j = json.dumps([u], ensure_ascii=False) + self.assertEqual(j, u'["' + u + u'"]') + + def test_big_unicode_encode(self): + u = u'\U0001d120' + self.assertEqual(json.dumps(u), '"\\ud834\\udd20"') + self.assertEqual(json.dumps(u, ensure_ascii=False), u'"\U0001d120"') + + def test_big_unicode_decode(self): + u = u'z\U0001d120x' + self.assertEqual(json.loads('"' + u + '"'), u) + self.assertEqual(json.loads('"z\\ud834\\udd20x"'), u) + + def test_unicode_decode(self): + for i in range(0, 0xd7ff): + u = unichr(i) + #s = '"\\u{0:04x}"'.format(i) + s = '"\\u%04x"' % (i,) + self.assertEqual(json.loads(s), u) + + def test_object_pairs_hook_with_unicode(self): + s = u'{"xkd":1, "kcw":2, "art":3, "hxm":4, "qrt":5, "pad":6, "hoy":7}' + p = [(u"xkd", 1), (u"kcw", 2), (u"art", 3), (u"hxm", 4), + (u"qrt", 5), (u"pad", 6), (u"hoy", 7)] + self.assertEqual(json.loads(s), eval(s)) + self.assertEqual(json.loads(s, object_pairs_hook=lambda x: x), p) + od = json.loads(s, object_pairs_hook=json.OrderedDict) + self.assertEqual(od, json.OrderedDict(p)) + self.assertEqual(type(od), json.OrderedDict) + # the object_pairs_hook takes priority over the object_hook + self.assertEqual(json.loads(s, + object_pairs_hook=json.OrderedDict, + object_hook=lambda x: None), + json.OrderedDict(p)) + + + def test_default_encoding(self): + self.assertEqual(json.loads(u'{"a": "\xe9"}'.encode('utf-8')), + {'a': u'\xe9'}) + + def test_unicode_preservation(self): + self.assertEqual(type(json.loads(u'""')), text_type) + self.assertEqual(type(json.loads(u'"a"')), text_type) + self.assertEqual(type(json.loads(u'["a"]')[0]), text_type) + + def test_ensure_ascii_false_returns_unicode(self): + # http://code.google.com/p/simplejson/issues/detail?id=48 + self.assertEqual(type(json.dumps([], ensure_ascii=False)), text_type) + self.assertEqual(type(json.dumps(0, ensure_ascii=False)), text_type) + self.assertEqual(type(json.dumps({}, ensure_ascii=False)), text_type) + self.assertEqual(type(json.dumps("", ensure_ascii=False)), text_type) + + def test_ensure_ascii_false_bytestring_encoding(self): + # http://code.google.com/p/simplejson/issues/detail?id=48 + doc1 = {u'quux': b('Arr\xc3\xaat sur images')} + doc2 = {u'quux': u'Arr\xeat sur images'} + doc_ascii = '{"quux": "Arr\\u00eat sur images"}' + doc_unicode = u'{"quux": "Arr\xeat sur images"}' + 
self.assertEqual(json.dumps(doc1), doc_ascii) + self.assertEqual(json.dumps(doc2), doc_ascii) + self.assertEqual(json.dumps(doc1, ensure_ascii=False), doc_unicode) + self.assertEqual(json.dumps(doc2, ensure_ascii=False), doc_unicode) + + def test_ensure_ascii_linebreak_encoding(self): + # http://timelessrepo.com/json-isnt-a-javascript-subset + s1 = u'\u2029\u2028' + s2 = s1.encode('utf8') + expect = '"\\u2029\\u2028"' + expect_non_ascii = u'"\u2029\u2028"' + self.assertEqual(json.dumps(s1), expect) + self.assertEqual(json.dumps(s2), expect) + self.assertEqual(json.dumps(s1, ensure_ascii=False), expect_non_ascii) + self.assertEqual(json.dumps(s2, ensure_ascii=False), expect_non_ascii) + + def test_invalid_escape_sequences(self): + # incomplete escape sequence + self.assertRaises(json.JSONDecodeError, json.loads, '"\\u') + self.assertRaises(json.JSONDecodeError, json.loads, '"\\u1') + self.assertRaises(json.JSONDecodeError, json.loads, '"\\u12') + self.assertRaises(json.JSONDecodeError, json.loads, '"\\u123') + self.assertRaises(json.JSONDecodeError, json.loads, '"\\u1234') + # invalid escape sequence + self.assertRaises(json.JSONDecodeError, json.loads, '"\\u123x"') + self.assertRaises(json.JSONDecodeError, json.loads, '"\\u12x4"') + self.assertRaises(json.JSONDecodeError, json.loads, '"\\u1x34"') + self.assertRaises(json.JSONDecodeError, json.loads, '"\\ux234"') + if sys.maxunicode > 65535: + # invalid escape sequence for low surrogate + self.assertRaises(json.JSONDecodeError, json.loads, '"\\ud800\\u"') + self.assertRaises(json.JSONDecodeError, json.loads, '"\\ud800\\u0"') + self.assertRaises(json.JSONDecodeError, json.loads, '"\\ud800\\u00"') + self.assertRaises(json.JSONDecodeError, json.loads, '"\\ud800\\u000"') + self.assertRaises(json.JSONDecodeError, json.loads, '"\\ud800\\u000x"') + self.assertRaises(json.JSONDecodeError, json.loads, '"\\ud800\\u00x0"') + self.assertRaises(json.JSONDecodeError, json.loads, '"\\ud800\\u0x00"') + self.assertRaises(json.JSONDecodeError, json.loads, '"\\ud800\\ux000"') + + def test_ensure_ascii_still_works(self): + # in the ascii range, ensure that everything is the same + for c in map(unichr, range(0, 127)): + self.assertEqual( + json.dumps(c, ensure_ascii=False), + json.dumps(c)) + snowman = u'\N{SNOWMAN}' + self.assertEqual( + json.dumps(c, ensure_ascii=False), + '"' + c + '"') + + def test_strip_bom(self): + content = u"\u3053\u3093\u306b\u3061\u308f" + json_doc = codecs.BOM_UTF8 + b(json.dumps(content)) + self.assertEqual(json.load(BytesIO(json_doc)), content) + for doc in json_doc, json_doc.decode('utf8'): + self.assertEqual(json.loads(doc), content) diff --git a/flask/venv/lib/python3.6/site-packages/simplejson/tool.py b/flask/venv/lib/python3.6/site-packages/simplejson/tool.py new file mode 100644 index 0000000..062e8e2 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/simplejson/tool.py @@ -0,0 +1,42 @@ +r"""Command-line tool to validate and pretty-print JSON + +Usage:: + + $ echo '{"json":"obj"}' | python -m simplejson.tool + { + "json": "obj" + } + $ echo '{ 1.2:3.4}' | python -m simplejson.tool + Expecting property name: line 1 column 2 (char 2) + +""" +from __future__ import with_statement +import sys +import simplejson as json + +def main(): + if len(sys.argv) == 1: + infile = sys.stdin + outfile = sys.stdout + elif len(sys.argv) == 2: + infile = open(sys.argv[1], 'r') + outfile = sys.stdout + elif len(sys.argv) == 3: + infile = open(sys.argv[1], 'r') + outfile = open(sys.argv[2], 'w') + else: + raise SystemExit(sys.argv[0] + " 
[infile [outfile]]") + with infile: + try: + obj = json.load(infile, + object_pairs_hook=json.OrderedDict, + use_decimal=True) + except ValueError: + raise SystemExit(sys.exc_info()[1]) + with outfile: + json.dump(obj, outfile, sort_keys=True, indent=' ', use_decimal=True) + outfile.write('\n') + + +if __name__ == '__main__': + main() diff --git a/flask/venv/lib/python3.6/site-packages/watchdog-0.9.0.egg-info/PKG-INFO b/flask/venv/lib/python3.6/site-packages/watchdog-0.9.0.egg-info/PKG-INFO new file mode 100644 index 0000000..39d2f3b --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/watchdog-0.9.0.egg-info/PKG-INFO @@ -0,0 +1,334 @@ +Metadata-Version: 1.1 +Name: watchdog +Version: 0.9.0 +Summary: Filesystem events monitoring +Home-page: http://github.com/gorakhargosh/watchdog +Author: Yesudeep Mangalapilly +Author-email: yesudeep@gmail.com +License: Apache License 2.0 +Description: Watchdog + ======== + Python API and shell utilities to monitor file system events. + + + Example API Usage + ----------------- + A simple program that uses watchdog to monitor directories specified + as command-line arguments and logs events generated: + + .. code-block:: python + + import sys + import time + import logging + from watchdog.observers import Observer + from watchdog.events import LoggingEventHandler + + if __name__ == "__main__": + logging.basicConfig(level=logging.INFO, + format='%(asctime)s - %(message)s', + datefmt='%Y-%m-%d %H:%M:%S') + path = sys.argv[1] if len(sys.argv) > 1 else '.' + event_handler = LoggingEventHandler() + observer = Observer() + observer.schedule(event_handler, path, recursive=True) + observer.start() + try: + while True: + time.sleep(1) + except KeyboardInterrupt: + observer.stop() + observer.join() + + + Shell Utilities + --------------- + Watchdog comes with a utility script called ``watchmedo``. + Please type ``watchmedo --help`` at the shell prompt to + know more about this tool. + + Here is how you can log the current directory recursively + for events related only to ``*.py`` and ``*.txt`` files while + ignoring all directory events: + + .. code-block:: bash + + watchmedo log \ + --patterns="*.py;*.txt" \ + --ignore-directories \ + --recursive \ + . + + You can use the ``shell-command`` subcommand to execute shell commands in + response to events: + + .. code-block:: bash + + watchmedo shell-command \ + --patterns="*.py;*.txt" \ + --recursive \ + --command='echo "${watch_src_path}"' \ + . + + Please see the help information for these commands by typing: + + .. code-block:: bash + + watchmedo [command] --help + + + About ``watchmedo`` Tricks + ~~~~~~~~~~~~~~~~~~~~~~~~~~ + ``watchmedo`` can read ``tricks.yaml`` files and execute tricks within them in + response to file system events. Tricks are actually event handlers that + subclass ``watchdog.tricks.Trick`` and are written by plugin authors. Trick + classes are augmented with a few additional features that regular event handlers + don't need. + + An example ``tricks.yaml`` file: + + .. 
code-block:: yaml + + tricks: + - watchdog.tricks.LoggerTrick: + patterns: ["*.py", "*.js"] + - watchmedo_webtricks.GoogleClosureTrick: + patterns: ['*.js'] + hash_names: true + mappings_format: json # json|yaml|python + mappings_module: app/javascript_mappings + suffix: .min.js + compilation_level: advanced # simple|advanced + source_directory: app/static/js/ + destination_directory: app/public/js/ + files: + index-page: + - app/static/js/vendor/jquery*.js + - app/static/js/base.js + - app/static/js/index-page.js + about-page: + - app/static/js/vendor/jquery*.js + - app/static/js/base.js + - app/static/js/about-page/**/*.js + + The directory containing the ``tricks.yaml`` file will be monitored. Each trick + class is initialized with its corresponding keys in the ``tricks.yaml`` file as + arguments and events are fed to an instance of this class as they arrive. + + Tricks will be included in the 0.5.0 release. I need community input about them. + Please file enhancement requests at the `issue tracker`_. + + + Installation + ------------ + Installing from PyPI using ``pip``: + + .. code-block:: bash + + $ pip install watchdog + + Installing from PyPI using ``easy_install``: + + .. code-block:: bash + + $ easy_install watchdog + + Installing from source: + + .. code-block:: bash + + $ python setup.py install + + + Installation Caveats + ~~~~~~~~~~~~~~~~~~~~ + The ``watchmedo`` script depends on PyYAML_ which links with LibYAML_, + which brings a performance boost to the PyYAML parser. However, installing + LibYAML_ is optional but recommended. On Mac OS X, you can use homebrew_ + to install LibYAML: + + .. code-block:: bash + + $ brew install libyaml + + On Linux, use your favorite package manager to install LibYAML. Here's how you + do it on Ubuntu: + + .. code-block:: bash + + $ sudo aptitude install libyaml-dev + + On Windows, please install PyYAML_ using the binaries they provide. + + Documentation + ------------- + You can browse the latest release documentation_ online. + + Contribute + ---------- + Fork the `repository`_ on GitHub and send a pull request, or file an issue + ticket at the `issue tracker`_. For general help and questions use the official + `mailing list`_ or ask on `stackoverflow`_ with tag `python-watchdog`. + + Create and activate your virtual environment, then:: + + pip install pytest + pip install -e . + py.test tests + + + Supported Platforms + ------------------- + * Linux 2.6 (inotify) + * Mac OS X (FSEvents, kqueue) + * FreeBSD/BSD (kqueue) + * Windows (ReadDirectoryChangesW with I/O completion ports; + ReadDirectoryChangesW worker threads) + * OS-independent (polling the disk for directory snapshots and comparing them + periodically; slow and not recommended) + + Note that when using watchdog with kqueue, you need the + number of file descriptors allowed to be opened by programs + running on your system to be increased to more than the + number of files that you will be monitoring. The easiest way + to do that is to edit your ``~/.profile`` file and add + a line similar to:: + + ulimit -n 1024 + + This is an inherent problem with kqueue because it uses + file descriptors to monitor files. That plus the enormous + amount of bookkeeping that watchdog needs to do in order + to monitor file descriptors just makes this a painful way + to monitor files and directories. In essence, kqueue is + not a very scalable way to monitor a deeply nested + directory of files and directories with a large number of + files. 
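        If you would rather raise the limit from inside your program than edit a
        shell profile, the standard library ``resource`` module can do it before the
        observer is started. This is a sketch added for illustration (it is not part
        of the original README); the value 4096 is only an example:

        .. code-block:: python

            import resource  # POSIX only

            # Raise the soft open-file limit up to the hard limit if it is too low.
            soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE)
            target = 4096  # pick something larger than the number of watched files
            if soft < target <= hard:
                resource.setrlimit(resource.RLIMIT_NOFILE, (target, hard))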
+ + About using watchdog with editors like Vim + ------------------------------------------ + Vim does not modify files unless directed to do so. + It creates backup files and then swaps them in to replace + the files you are editing on the disk. This means that + if you use Vim to edit your files, the on-modified events + for those files will not be triggered by watchdog. + You may need to configure Vim to appropriately to disable + this feature. + + + Dependencies + ------------ + 1. Python 2.6 or above. + 2. pathtools_ + 3. select_backport_ (select.kqueue replacement for 2.6 on BSD/Mac OS X) + 4. XCode_ (only on Mac OS X) + 5. PyYAML_ (only for ``watchmedo`` script) + 6. argh_ (only for ``watchmedo`` script) + + + Licensing + --------- + Watchdog is licensed under the terms of the `Apache License, version 2.0`_. + + Copyright 2011 `Yesudeep Mangalapilly`_. + + Copyright 2012 Google, Inc. + + Project `source code`_ is available at Github. Please report bugs and file + enhancement requests at the `issue tracker`_. + + Why Watchdog? + ------------- + Too many people tried to do the same thing and none did what I needed Python + to do: + + * pnotify_ + * `unison fsmonitor`_ + * fsmonitor_ + * guard_ + * pyinotify_ + * `inotify-tools`_ + * jnotify_ + * treewalker_ + * `file.monitor`_ + * pyfilesystem_ + + .. links: + .. _Yesudeep Mangalapilly: yesudeep@gmail.com + .. _source code: http://github.com/gorakhargosh/watchdog + .. _issue tracker: http://github.com/gorakhargosh/watchdog/issues + .. _Apache License, version 2.0: http://www.apache.org/licenses/LICENSE-2.0 + .. _documentation: http://packages.python.org/watchdog/ + .. _stackoverflow: http://stackoverflow.com/questions/tagged/python-watchdog + .. _mailing list: http://groups.google.com/group/watchdog-python + .. _repository: http://github.com/gorakhargosh/watchdog + .. _issue tracker: http://github.com/gorakhargosh/watchdog/issues + + .. _homebrew: http://mxcl.github.com/homebrew/ + .. _select_backport: http://pypi.python.org/pypi/select_backport + .. _argh: http://pypi.python.org/pypi/argh + .. _PyYAML: http://www.pyyaml.org/ + .. _XCode: http://developer.apple.com/technologies/tools/xcode.html + .. _LibYAML: http://pyyaml.org/wiki/LibYAML + .. _pathtools: http://github.com/gorakhargosh/pathtools + + .. _pnotify: http://mark.heily.com/pnotify + .. _unison fsmonitor: https://webdav.seas.upenn.edu/viewvc/unison/trunk/src/fsmonitor.py?view=markup&pathrev=471 + .. _fsmonitor: http://github.com/shaurz/fsmonitor + .. _guard: http://github.com/guard/guard + .. _pyinotify: http://github.com/seb-m/pyinotify + .. _inotify-tools: http://github.com/rvoicilas/inotify-tools + .. _jnotify: http://jnotify.sourceforge.net/ + .. _treewalker: http://github.com/jbd/treewatcher + .. _file.monitor: http://github.com/pke/file.monitor + .. _pyfilesystem: http://code.google.com/p/pyfilesystem + + + .. :changelog: + + API changes + ----------- + + 0.8.2 + ~~~~~ + + - Event emitters are no longer started on schedule if ``Observer`` is not + already running. + + + 0.8.0 + ~~~~~ + + - ``DirectorySnapshot``: methods returning internal stat info replaced by + ``mtime``, ``inode`` and ``path`` methods. + - ``DirectorySnapshot``: ``walker_callback`` parameter deprecated. 
+ +Keywords: python filesystem monitoring monitor FSEvents kqueue inotify ReadDirectoryChangesW polling DirectorySnapshot +Platform: UNKNOWN +Classifier: Development Status :: 3 - Alpha +Classifier: Environment :: Console +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: System Administrators +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Natural Language :: English +Classifier: Operating System :: POSIX :: Linux +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Operating System :: POSIX :: BSD +Classifier: Operating System :: Microsoft :: Windows :: Windows NT/2000 +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.2 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Programming Language :: C +Classifier: Topic :: Software Development :: Libraries +Classifier: Topic :: System :: Monitoring +Classifier: Topic :: System :: Filesystems +Classifier: Topic :: Utilities diff --git a/flask/venv/lib/python3.6/site-packages/watchdog-0.9.0.egg-info/SOURCES.txt b/flask/venv/lib/python3.6/site-packages/watchdog-0.9.0.egg-info/SOURCES.txt new file mode 100644 index 0000000..5dd0b82 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/watchdog-0.9.0.egg-info/SOURCES.txt @@ -0,0 +1,75 @@ +AUTHORS +COPYING +LICENSE +MANIFEST.in +README.rst +changelog.rst +setup.cfg +setup.py +docs/Makefile +docs/dependencies.txt +docs/echo.py.txt +docs/eclipse_cdt_style.xml +docs/make.bat +docs/source/api.rst +docs/source/conf.py +docs/source/global.rst.inc +docs/source/hacking.rst +docs/source/index.rst +docs/source/installation.rst +docs/source/quickstart.rst +docs/source/examples/logger.py +docs/source/examples/patterns.py +docs/source/examples/simple.py +docs/source/examples/tricks.json +docs/source/examples/tricks.yaml +src/watchdog_fsevents.c +src/watchdog/__init__.py +src/watchdog/events.py +src/watchdog/version.py +src/watchdog/watchmedo.py +src/watchdog.egg-info/PKG-INFO +src/watchdog.egg-info/SOURCES.txt +src/watchdog.egg-info/dependency_links.txt +src/watchdog.egg-info/entry_points.txt +src/watchdog.egg-info/not-zip-safe +src/watchdog.egg-info/requires.txt +src/watchdog.egg-info/top_level.txt +src/watchdog/observers/__init__.py +src/watchdog/observers/api.py +src/watchdog/observers/fsevents.py +src/watchdog/observers/fsevents2.py +src/watchdog/observers/inotify.py +src/watchdog/observers/inotify_buffer.py +src/watchdog/observers/inotify_c.py +src/watchdog/observers/kqueue.py +src/watchdog/observers/polling.py +src/watchdog/observers/read_directory_changes.py +src/watchdog/observers/winapi.py +src/watchdog/tricks/__init__.py +src/watchdog/utils/__init__.py +src/watchdog/utils/bricks.py +src/watchdog/utils/compat.py +src/watchdog/utils/decorators.py +src/watchdog/utils/delayed_queue.py +src/watchdog/utils/dirsnapshot.py +src/watchdog/utils/echo.py +src/watchdog/utils/event_backport.py +src/watchdog/utils/importlib2.py +src/watchdog/utils/platform.py +src/watchdog/utils/unicode_paths.py +src/watchdog/utils/win32stat.py 
+tests/__init__.py +tests/shell.py +tests/test_delayed_queue.py +tests/test_emitter.py +tests/test_inotify_buffer.py +tests/test_inotify_c.py +tests/test_observer.py +tests/test_skip_repeats_queue.py +tests/test_snapshot_diff.py +tests/legacy/test_watch_observers_winapi.py +tests/legacy/test_watchdog_events.py +tests/legacy/test_watchdog_observers_api.py +tests/legacy/test_watchdog_observers_polling.py +tests/legacy/utils.py \ No newline at end of file diff --git a/flask/venv/lib/python3.6/site-packages/watchdog-0.9.0.egg-info/dependency_links.txt b/flask/venv/lib/python3.6/site-packages/watchdog-0.9.0.egg-info/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/watchdog-0.9.0.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/flask/venv/lib/python3.6/site-packages/watchdog-0.9.0.egg-info/entry_points.txt b/flask/venv/lib/python3.6/site-packages/watchdog-0.9.0.egg-info/entry_points.txt new file mode 100644 index 0000000..043c91e --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/watchdog-0.9.0.egg-info/entry_points.txt @@ -0,0 +1,3 @@ +[console_scripts] +watchmedo = watchdog.watchmedo:main + diff --git a/flask/venv/lib/python3.6/site-packages/watchdog-0.9.0.egg-info/installed-files.txt b/flask/venv/lib/python3.6/site-packages/watchdog-0.9.0.egg-info/installed-files.txt new file mode 100644 index 0000000..8b27537 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/watchdog-0.9.0.egg-info/installed-files.txt @@ -0,0 +1,64 @@ +../../../../bin/watchmedo +../watchdog/__init__.py +../watchdog/__pycache__/__init__.cpython-36.pyc +../watchdog/__pycache__/events.cpython-36.pyc +../watchdog/__pycache__/version.cpython-36.pyc +../watchdog/__pycache__/watchmedo.cpython-36.pyc +../watchdog/events.py +../watchdog/observers/__init__.py +../watchdog/observers/__pycache__/__init__.cpython-36.pyc +../watchdog/observers/__pycache__/api.cpython-36.pyc +../watchdog/observers/__pycache__/fsevents.cpython-36.pyc +../watchdog/observers/__pycache__/fsevents2.cpython-36.pyc +../watchdog/observers/__pycache__/inotify.cpython-36.pyc +../watchdog/observers/__pycache__/inotify_buffer.cpython-36.pyc +../watchdog/observers/__pycache__/inotify_c.cpython-36.pyc +../watchdog/observers/__pycache__/kqueue.cpython-36.pyc +../watchdog/observers/__pycache__/polling.cpython-36.pyc +../watchdog/observers/__pycache__/read_directory_changes.cpython-36.pyc +../watchdog/observers/__pycache__/winapi.cpython-36.pyc +../watchdog/observers/api.py +../watchdog/observers/fsevents.py +../watchdog/observers/fsevents2.py +../watchdog/observers/inotify.py +../watchdog/observers/inotify_buffer.py +../watchdog/observers/inotify_c.py +../watchdog/observers/kqueue.py +../watchdog/observers/polling.py +../watchdog/observers/read_directory_changes.py +../watchdog/observers/winapi.py +../watchdog/tricks/__init__.py +../watchdog/tricks/__pycache__/__init__.cpython-36.pyc +../watchdog/utils/__init__.py +../watchdog/utils/__pycache__/__init__.cpython-36.pyc +../watchdog/utils/__pycache__/bricks.cpython-36.pyc +../watchdog/utils/__pycache__/compat.cpython-36.pyc +../watchdog/utils/__pycache__/decorators.cpython-36.pyc +../watchdog/utils/__pycache__/delayed_queue.cpython-36.pyc +../watchdog/utils/__pycache__/dirsnapshot.cpython-36.pyc +../watchdog/utils/__pycache__/echo.cpython-36.pyc +../watchdog/utils/__pycache__/event_backport.cpython-36.pyc +../watchdog/utils/__pycache__/importlib2.cpython-36.pyc +../watchdog/utils/__pycache__/platform.cpython-36.pyc 
+../watchdog/utils/__pycache__/unicode_paths.cpython-36.pyc +../watchdog/utils/__pycache__/win32stat.cpython-36.pyc +../watchdog/utils/bricks.py +../watchdog/utils/compat.py +../watchdog/utils/decorators.py +../watchdog/utils/delayed_queue.py +../watchdog/utils/dirsnapshot.py +../watchdog/utils/echo.py +../watchdog/utils/event_backport.py +../watchdog/utils/importlib2.py +../watchdog/utils/platform.py +../watchdog/utils/unicode_paths.py +../watchdog/utils/win32stat.py +../watchdog/version.py +../watchdog/watchmedo.py +PKG-INFO +SOURCES.txt +dependency_links.txt +entry_points.txt +not-zip-safe +requires.txt +top_level.txt diff --git a/flask/venv/lib/python3.6/site-packages/watchdog-0.9.0.egg-info/not-zip-safe b/flask/venv/lib/python3.6/site-packages/watchdog-0.9.0.egg-info/not-zip-safe new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/watchdog-0.9.0.egg-info/not-zip-safe @@ -0,0 +1 @@ + diff --git a/flask/venv/lib/python3.6/site-packages/watchdog-0.9.0.egg-info/requires.txt b/flask/venv/lib/python3.6/site-packages/watchdog-0.9.0.egg-info/requires.txt new file mode 100644 index 0000000..55db381 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/watchdog-0.9.0.egg-info/requires.txt @@ -0,0 +1,3 @@ +PyYAML>=3.10 +argh>=0.24.1 +pathtools>=0.1.1 diff --git a/flask/venv/lib/python3.6/site-packages/watchdog-0.9.0.egg-info/top_level.txt b/flask/venv/lib/python3.6/site-packages/watchdog-0.9.0.egg-info/top_level.txt new file mode 100644 index 0000000..e59495e --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/watchdog-0.9.0.egg-info/top_level.txt @@ -0,0 +1 @@ +watchdog diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/__init__.py b/flask/venv/lib/python3.6/site-packages/watchdog/__init__.py new file mode 100644 index 0000000..1a641ff --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/watchdog/__init__.py @@ -0,0 +1,17 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2011 Yesudeep Mangalapilly +# Copyright 2012 Google, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
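The ``console_scripts`` entry point recorded above (``watchmedo = watchdog.watchmedo:main``) is what puts the ``watchmedo`` command on ``PATH`` after installation: the generated wrapper script simply imports that function and calls it. Roughly, and only as an illustrative sketch:

    # approximately what the generated `watchmedo` wrapper script does
    from watchdog.watchmedo import main
    main()  # reads sys.argv and dispatches to the chosen subcommand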
diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/__pycache__/__init__.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/watchdog/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..5eb99bc Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/watchdog/__pycache__/__init__.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/__pycache__/events.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/watchdog/__pycache__/events.cpython-36.pyc new file mode 100644 index 0000000..4e5ea49 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/watchdog/__pycache__/events.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/__pycache__/version.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/watchdog/__pycache__/version.cpython-36.pyc new file mode 100644 index 0000000..c362506 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/watchdog/__pycache__/version.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/__pycache__/watchmedo.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/watchdog/__pycache__/watchmedo.cpython-36.pyc new file mode 100644 index 0000000..2bcc6b8 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/watchdog/__pycache__/watchmedo.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/events.py b/flask/venv/lib/python3.6/site-packages/watchdog/events.py new file mode 100644 index 0000000..7c1f075 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/watchdog/events.py @@ -0,0 +1,615 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2011 Yesudeep Mangalapilly +# Copyright 2012 Google, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +:module: watchdog.events +:synopsis: File system events and event handlers. +:author: yesudeep@google.com (Yesudeep Mangalapilly) + +Event Classes +------------- +.. autoclass:: FileSystemEvent + :members: + :show-inheritance: + :inherited-members: + +.. autoclass:: FileSystemMovedEvent + :members: + :show-inheritance: + +.. autoclass:: FileMovedEvent + :members: + :show-inheritance: + +.. autoclass:: DirMovedEvent + :members: + :show-inheritance: + +.. autoclass:: FileModifiedEvent + :members: + :show-inheritance: + +.. autoclass:: DirModifiedEvent + :members: + :show-inheritance: + +.. autoclass:: FileCreatedEvent + :members: + :show-inheritance: + +.. autoclass:: DirCreatedEvent + :members: + :show-inheritance: + +.. autoclass:: FileDeletedEvent + :members: + :show-inheritance: + +.. autoclass:: DirDeletedEvent + :members: + :show-inheritance: + + +Event Handler Classes +--------------------- +.. autoclass:: FileSystemEventHandler + :members: + :show-inheritance: + +.. autoclass:: PatternMatchingEventHandler + :members: + :show-inheritance: + +.. autoclass:: RegexMatchingEventHandler + :members: + :show-inheritance: + +.. 
autoclass:: LoggingEventHandler + :members: + :show-inheritance: + +""" + +import os.path +import logging +import re +from pathtools.patterns import match_any_paths +from watchdog.utils import has_attribute +from watchdog.utils import unicode_paths + + +EVENT_TYPE_MOVED = 'moved' +EVENT_TYPE_DELETED = 'deleted' +EVENT_TYPE_CREATED = 'created' +EVENT_TYPE_MODIFIED = 'modified' + + +class FileSystemEvent(object): + """ + Immutable type that represents a file system event that is triggered + when a change occurs on the monitored file system. + + All FileSystemEvent objects are required to be immutable and hence + can be used as keys in dictionaries or be added to sets. + """ + + event_type = None + """The type of the event as a string.""" + + is_directory = False + """True if event was emitted for a directory; False otherwise.""" + + def __init__(self, src_path): + self._src_path = src_path + + @property + def src_path(self): + """Source path of the file system object that triggered this event.""" + return self._src_path + + def __str__(self): + return self.__repr__() + + def __repr__(self): + return ("<%(class_name)s: event_type=%(event_type)s, " + "src_path=%(src_path)r, " + "is_directory=%(is_directory)s>" + ) % (dict( + class_name=self.__class__.__name__, + event_type=self.event_type, + src_path=self.src_path, + is_directory=self.is_directory)) + + # Used for comparison of events. + @property + def key(self): + return (self.event_type, self.src_path, self.is_directory) + + def __eq__(self, event): + return self.key == event.key + + def __ne__(self, event): + return self.key != event.key + + def __hash__(self): + return hash(self.key) + + +class FileSystemMovedEvent(FileSystemEvent): + """ + File system event representing any kind of file system movement. + """ + + event_type = EVENT_TYPE_MOVED + + def __init__(self, src_path, dest_path): + super(FileSystemMovedEvent, self).__init__(src_path) + self._dest_path = dest_path + + @property + def dest_path(self): + """The destination path of the move event.""" + return self._dest_path + + # Used for hashing this as an immutable object. + @property + def key(self): + return (self.event_type, self.src_path, self.dest_path, self.is_directory) + + def __repr__(self): + return ("<%(class_name)s: src_path=%(src_path)r, " + "dest_path=%(dest_path)r, " + "is_directory=%(is_directory)s>" + ) % (dict(class_name=self.__class__.__name__, + src_path=self.src_path, + dest_path=self.dest_path, + is_directory=self.is_directory)) + + +# File events. 
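# Illustrative sketch (not part of watchdog): the concrete event classes below
# inherit FileSystemEvent's key-based equality and hashing, so identical events
# collapse when stored in a set; this is what lets the observer's event queue
# skip consecutive duplicates.
#
#     e1 = FileModifiedEvent('/tmp/example.txt')
#     e2 = FileModifiedEvent('/tmp/example.txt')
#     assert e1 == e2 and len({e1, e2}) == 1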
+ + +class FileDeletedEvent(FileSystemEvent): + """File system event representing file deletion on the file system.""" + + event_type = EVENT_TYPE_DELETED + + def __init__(self, src_path): + super(FileDeletedEvent, self).__init__(src_path) + + def __repr__(self): + return "<%(class_name)s: src_path=%(src_path)r>" %\ + dict(class_name=self.__class__.__name__, + src_path=self.src_path) + + +class FileModifiedEvent(FileSystemEvent): + """File system event representing file modification on the file system.""" + + event_type = EVENT_TYPE_MODIFIED + + def __init__(self, src_path): + super(FileModifiedEvent, self).__init__(src_path) + + def __repr__(self): + return ("<%(class_name)s: src_path=%(src_path)r>" + ) % (dict(class_name=self.__class__.__name__, + src_path=self.src_path)) + + +class FileCreatedEvent(FileSystemEvent): + """File system event representing file creation on the file system.""" + + event_type = EVENT_TYPE_CREATED + + def __init__(self, src_path): + super(FileCreatedEvent, self).__init__(src_path) + + def __repr__(self): + return ("<%(class_name)s: src_path=%(src_path)r>" + ) % (dict(class_name=self.__class__.__name__, + src_path=self.src_path)) + + +class FileMovedEvent(FileSystemMovedEvent): + """File system event representing file movement on the file system.""" + + def __init__(self, src_path, dest_path): + super(FileMovedEvent, self).__init__(src_path, dest_path) + + def __repr__(self): + return ("<%(class_name)s: src_path=%(src_path)r, " + "dest_path=%(dest_path)r>" + ) % (dict(class_name=self.__class__.__name__, + src_path=self.src_path, + dest_path=self.dest_path)) + + +# Directory events. + + +class DirDeletedEvent(FileSystemEvent): + """File system event representing directory deletion on the file system.""" + + event_type = EVENT_TYPE_DELETED + is_directory = True + + def __init__(self, src_path): + super(DirDeletedEvent, self).__init__(src_path) + + def __repr__(self): + return ("<%(class_name)s: src_path=%(src_path)r>" + ) % (dict(class_name=self.__class__.__name__, + src_path=self.src_path)) + + +class DirModifiedEvent(FileSystemEvent): + """ + File system event representing directory modification on the file system. + """ + + event_type = EVENT_TYPE_MODIFIED + is_directory = True + + def __init__(self, src_path): + super(DirModifiedEvent, self).__init__(src_path) + + def __repr__(self): + return ("<%(class_name)s: src_path=%(src_path)r>" + ) % (dict(class_name=self.__class__.__name__, + src_path=self.src_path)) + + +class DirCreatedEvent(FileSystemEvent): + """File system event representing directory creation on the file system.""" + + event_type = EVENT_TYPE_CREATED + is_directory = True + + def __init__(self, src_path): + super(DirCreatedEvent, self).__init__(src_path) + + def __repr__(self): + return ("<%(class_name)s: src_path=%(src_path)r>" + ) % (dict(class_name=self.__class__.__name__, + src_path=self.src_path)) + + +class DirMovedEvent(FileSystemMovedEvent): + """File system event representing directory movement on the file system.""" + + is_directory = True + + def __init__(self, src_path, dest_path): + super(DirMovedEvent, self).__init__(src_path, dest_path) + + def __repr__(self): + return ("<%(class_name)s: src_path=%(src_path)r, " + "dest_path=%(dest_path)r>" + ) % (dict(class_name=self.__class__.__name__, + src_path=self.src_path, + dest_path=self.dest_path)) + + +class FileSystemEventHandler(object): + """ + Base file system event handler that you can override methods from. 
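
    Example (added for illustration; not part of the original docstring)::

        class MyHandler(FileSystemEventHandler):
            def on_modified(self, event):
                print('modified: %s' % event.src_path)

        # observer.schedule(MyHandler(), '/some/path', recursive=True)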
+ """ + + def dispatch(self, event): + """Dispatches events to the appropriate methods. + + :param event: + The event object representing the file system event. + :type event: + :class:`FileSystemEvent` + """ + self.on_any_event(event) + _method_map = { + EVENT_TYPE_MODIFIED: self.on_modified, + EVENT_TYPE_MOVED: self.on_moved, + EVENT_TYPE_CREATED: self.on_created, + EVENT_TYPE_DELETED: self.on_deleted, + } + event_type = event.event_type + _method_map[event_type](event) + + def on_any_event(self, event): + """Catch-all event handler. + + :param event: + The event object representing the file system event. + :type event: + :class:`FileSystemEvent` + """ + + def on_moved(self, event): + """Called when a file or a directory is moved or renamed. + + :param event: + Event representing file/directory movement. + :type event: + :class:`DirMovedEvent` or :class:`FileMovedEvent` + """ + + def on_created(self, event): + """Called when a file or directory is created. + + :param event: + Event representing file/directory creation. + :type event: + :class:`DirCreatedEvent` or :class:`FileCreatedEvent` + """ + + def on_deleted(self, event): + """Called when a file or directory is deleted. + + :param event: + Event representing file/directory deletion. + :type event: + :class:`DirDeletedEvent` or :class:`FileDeletedEvent` + """ + + def on_modified(self, event): + """Called when a file or directory is modified. + + :param event: + Event representing file/directory modification. + :type event: + :class:`DirModifiedEvent` or :class:`FileModifiedEvent` + """ + + +class PatternMatchingEventHandler(FileSystemEventHandler): + """ + Matches given patterns with file paths associated with occurring events. + """ + + def __init__(self, patterns=None, ignore_patterns=None, + ignore_directories=False, case_sensitive=False): + super(PatternMatchingEventHandler, self).__init__() + + self._patterns = patterns + self._ignore_patterns = ignore_patterns + self._ignore_directories = ignore_directories + self._case_sensitive = case_sensitive + + @property + def patterns(self): + """ + (Read-only) + Patterns to allow matching event paths. + """ + return self._patterns + + @property + def ignore_patterns(self): + """ + (Read-only) + Patterns to ignore matching event paths. + """ + return self._ignore_patterns + + @property + def ignore_directories(self): + """ + (Read-only) + ``True`` if directories should be ignored; ``False`` otherwise. + """ + return self._ignore_directories + + @property + def case_sensitive(self): + """ + (Read-only) + ``True`` if path names should be matched sensitive to case; ``False`` + otherwise. + """ + return self._case_sensitive + + def dispatch(self, event): + """Dispatches events to the appropriate methods. + + :param event: + The event object representing the file system event. 
+ :type event: + :class:`FileSystemEvent` + """ + if self.ignore_directories and event.is_directory: + return + + paths = [] + if has_attribute(event, 'dest_path'): + paths.append(unicode_paths.decode(event.dest_path)) + if event.src_path: + paths.append(unicode_paths.decode(event.src_path)) + + if match_any_paths(paths, + included_patterns=self.patterns, + excluded_patterns=self.ignore_patterns, + case_sensitive=self.case_sensitive): + self.on_any_event(event) + _method_map = { + EVENT_TYPE_MODIFIED: self.on_modified, + EVENT_TYPE_MOVED: self.on_moved, + EVENT_TYPE_CREATED: self.on_created, + EVENT_TYPE_DELETED: self.on_deleted, + } + event_type = event.event_type + _method_map[event_type](event) + + +class RegexMatchingEventHandler(FileSystemEventHandler): + """ + Matches given regexes with file paths associated with occurring events. + """ + + def __init__(self, regexes=[r".*"], ignore_regexes=[], + ignore_directories=False, case_sensitive=False): + super(RegexMatchingEventHandler, self).__init__() + + if case_sensitive: + self._regexes = [re.compile(r) for r in regexes] + self._ignore_regexes = [re.compile(r) for r in ignore_regexes] + else: + self._regexes = [re.compile(r, re.I) for r in regexes] + self._ignore_regexes = [re.compile(r, re.I) for r in ignore_regexes] + self._ignore_directories = ignore_directories + self._case_sensitive = case_sensitive + + @property + def regexes(self): + """ + (Read-only) + Regexes to allow matching event paths. + """ + return self._regexes + + @property + def ignore_regexes(self): + """ + (Read-only) + Regexes to ignore matching event paths. + """ + return self._ignore_regexes + + @property + def ignore_directories(self): + """ + (Read-only) + ``True`` if directories should be ignored; ``False`` otherwise. + """ + return self._ignore_directories + + @property + def case_sensitive(self): + """ + (Read-only) + ``True`` if path names should be matched sensitive to case; ``False`` + otherwise. + """ + return self._case_sensitive + + def dispatch(self, event): + """Dispatches events to the appropriate methods. + + :param event: + The event object representing the file system event. 
+ :type event: + :class:`FileSystemEvent` + """ + if self.ignore_directories and event.is_directory: + return + + paths = [] + if has_attribute(event, 'dest_path'): + paths.append(unicode_paths.decode(event.dest_path)) + if event.src_path: + paths.append(unicode_paths.decode(event.src_path)) + + if any(r.match(p) for r in self.ignore_regexes for p in paths): + return + + if any(r.match(p) for r in self.regexes for p in paths): + self.on_any_event(event) + _method_map = { + EVENT_TYPE_MODIFIED: self.on_modified, + EVENT_TYPE_MOVED: self.on_moved, + EVENT_TYPE_CREATED: self.on_created, + EVENT_TYPE_DELETED: self.on_deleted, + } + event_type = event.event_type + _method_map[event_type](event) + + +class LoggingEventHandler(FileSystemEventHandler): + """Logs all the events captured.""" + + def on_moved(self, event): + super(LoggingEventHandler, self).on_moved(event) + + what = 'directory' if event.is_directory else 'file' + logging.info("Moved %s: from %s to %s", what, event.src_path, + event.dest_path) + + def on_created(self, event): + super(LoggingEventHandler, self).on_created(event) + + what = 'directory' if event.is_directory else 'file' + logging.info("Created %s: %s", what, event.src_path) + + def on_deleted(self, event): + super(LoggingEventHandler, self).on_deleted(event) + + what = 'directory' if event.is_directory else 'file' + logging.info("Deleted %s: %s", what, event.src_path) + + def on_modified(self, event): + super(LoggingEventHandler, self).on_modified(event) + + what = 'directory' if event.is_directory else 'file' + logging.info("Modified %s: %s", what, event.src_path) + + +class LoggingFileSystemEventHandler(LoggingEventHandler): + """ + For backwards-compatibility. Please use :class:`LoggingEventHandler` + instead. + """ + + +def generate_sub_moved_events(src_dir_path, dest_dir_path): + """Generates an event list of :class:`DirMovedEvent` and + :class:`FileMovedEvent` objects for all the files and directories within + the given moved directory that were moved along with the directory. + + :param src_dir_path: + The source path of the moved directory. + :param dest_dir_path: + The destination path of the moved directory. + :returns: + An iterable of file system events of type :class:`DirMovedEvent` and + :class:`FileMovedEvent`. + """ + for root, directories, filenames in os.walk(dest_dir_path): + for directory in directories: + full_path = os.path.join(root, directory) + renamed_path = full_path.replace(dest_dir_path, src_dir_path) if src_dir_path else None + yield DirMovedEvent(renamed_path, full_path) + for filename in filenames: + full_path = os.path.join(root, filename) + renamed_path = full_path.replace(dest_dir_path, src_dir_path) if src_dir_path else None + yield FileMovedEvent(renamed_path, full_path) + + +def generate_sub_created_events(src_dir_path): + """Generates an event list of :class:`DirCreatedEvent` and + :class:`FileCreatedEvent` objects for all the files and directories within + the given moved directory that were moved along with the directory. + + :param src_dir_path: + The source path of the created directory. + :returns: + An iterable of file system events of type :class:`DirCreatedEvent` and + :class:`FileCreatedEvent`. 
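
    Example (added for illustration; not part of the original docstring)::

        for event in generate_sub_created_events('/path/that/just/appeared'):
            print(event)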
+ """ + for root, directories, filenames in os.walk(src_dir_path): + for directory in directories: + yield DirCreatedEvent(os.path.join(root, directory)) + for filename in filenames: + yield FileCreatedEvent(os.path.join(root, filename)) \ No newline at end of file diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/observers/__init__.py b/flask/venv/lib/python3.6/site-packages/watchdog/observers/__init__.py new file mode 100644 index 0000000..2b03ef2 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/watchdog/observers/__init__.py @@ -0,0 +1,94 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2011 Yesudeep Mangalapilly +# Copyright 2012 Google, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +:module: watchdog.observers +:synopsis: Observer that picks a native implementation if available. +:author: yesudeep@google.com (Yesudeep Mangalapilly) + + +Classes +======= +.. autoclass:: Observer + :members: + :show-inheritance: + :inherited-members: + +Observer thread that schedules watching directories and dispatches +calls to event handlers. + +You can also import platform specific classes directly and use it instead +of :class:`Observer`. Here is a list of implemented observer classes.: + +============== ================================ ============================== +Class Platforms Note +============== ================================ ============================== +|Inotify| Linux 2.6.13+ ``inotify(7)`` based observer +|FSEvents| Mac OS X FSEvents based observer +|Kqueue| Mac OS X and BSD with kqueue(2) ``kqueue(2)`` based observer +|WinApi| MS Windows Windows API-based observer +|Polling| Any fallback implementation +============== ================================ ============================== + +.. |Inotify| replace:: :class:`.inotify.InotifyObserver` +.. |FSEvents| replace:: :class:`.fsevents.FSEventsObserver` +.. |Kqueue| replace:: :class:`.kqueue.KqueueObserver` +.. |WinApi| replace:: :class:`.read_directory_changes.WindowsApiObserver` +.. |WinApiAsync| replace:: :class:`.read_directory_changes_async.WindowsApiAsyncObserver` +.. |Polling| replace:: :class:`.polling.PollingObserver` + +""" + +import warnings +from watchdog.utils import platform +from watchdog.utils import UnsupportedLibc + +if platform.is_linux(): + try: + from .inotify import InotifyObserver as Observer + except UnsupportedLibc: + from .polling import PollingObserver as Observer + +elif platform.is_darwin(): + # FIXME: catching too broad. Error prone + try: + from .fsevents import FSEventsObserver as Observer + except: + try: + from .kqueue import KqueueObserver as Observer + warnings.warn("Failed to import fsevents. Fall back to kqueue") + except: + from .polling import PollingObserver as Observer + warnings.warn("Failed to import fsevents and kqueue. 
Fall back to polling.") + +elif platform.is_bsd(): + from .kqueue import KqueueObserver as Observer + +elif platform.is_windows(): + # TODO: find a reliable way of checking Windows version and import + # polling explicitly for Windows XP + try: + from .read_directory_changes import WindowsApiObserver as Observer + except: + from .polling import PollingObserver as Observer + warnings.warn("Failed to import read_directory_changes. Fall back to polling.") + +else: + from .polling import PollingObserver as Observer + +__all__ = ["Observer"] diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/observers/__pycache__/__init__.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/watchdog/observers/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..b3c8f89 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/watchdog/observers/__pycache__/__init__.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/observers/__pycache__/api.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/watchdog/observers/__pycache__/api.cpython-36.pyc new file mode 100644 index 0000000..2ca7a2d Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/watchdog/observers/__pycache__/api.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/observers/__pycache__/fsevents.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/watchdog/observers/__pycache__/fsevents.cpython-36.pyc new file mode 100644 index 0000000..f5a2dda Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/watchdog/observers/__pycache__/fsevents.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/observers/__pycache__/fsevents2.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/watchdog/observers/__pycache__/fsevents2.cpython-36.pyc new file mode 100644 index 0000000..2ec02e7 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/watchdog/observers/__pycache__/fsevents2.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/observers/__pycache__/inotify.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/watchdog/observers/__pycache__/inotify.cpython-36.pyc new file mode 100644 index 0000000..b878a9d Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/watchdog/observers/__pycache__/inotify.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/observers/__pycache__/inotify_buffer.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/watchdog/observers/__pycache__/inotify_buffer.cpython-36.pyc new file mode 100644 index 0000000..2817dcf Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/watchdog/observers/__pycache__/inotify_buffer.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/observers/__pycache__/inotify_c.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/watchdog/observers/__pycache__/inotify_c.cpython-36.pyc new file mode 100644 index 0000000..a80fb86 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/watchdog/observers/__pycache__/inotify_c.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/observers/__pycache__/kqueue.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/watchdog/observers/__pycache__/kqueue.cpython-36.pyc new file mode 100644 index 0000000..efc40e9 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/watchdog/observers/__pycache__/kqueue.cpython-36.pyc 
differ diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/observers/__pycache__/polling.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/watchdog/observers/__pycache__/polling.cpython-36.pyc new file mode 100644 index 0000000..4d35176 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/watchdog/observers/__pycache__/polling.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/observers/__pycache__/read_directory_changes.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/watchdog/observers/__pycache__/read_directory_changes.cpython-36.pyc new file mode 100644 index 0000000..7b5515a Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/watchdog/observers/__pycache__/read_directory_changes.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/observers/__pycache__/winapi.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/watchdog/observers/__pycache__/winapi.cpython-36.pyc new file mode 100644 index 0000000..6f699be Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/watchdog/observers/__pycache__/winapi.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/observers/api.py b/flask/venv/lib/python3.6/site-packages/watchdog/observers/api.py new file mode 100644 index 0000000..30f2e9a --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/watchdog/observers/api.py @@ -0,0 +1,369 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2011 Yesudeep Mangalapilly +# Copyright 2012 Google, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import with_statement +import threading +from watchdog.utils import BaseThread +from watchdog.utils.compat import queue +from watchdog.utils.bricks import SkipRepeatsQueue + +DEFAULT_EMITTER_TIMEOUT = 1 # in seconds. +DEFAULT_OBSERVER_TIMEOUT = 1 # in seconds. + + +# Collection classes +class EventQueue(SkipRepeatsQueue): + """Thread-safe event queue based on a special queue that skips adding + the same event (:class:`FileSystemEvent`) multiple times consecutively. + Thus avoiding dispatching multiple event handling + calls when multiple identical events are produced quicker than an observer + can consume them. + """ + + +class ObservedWatch(object): + """An scheduled watch. + + :param path: + Path string. + :param recursive: + ``True`` if watch is recursive; ``False`` otherwise. 
+ """ + + def __init__(self, path, recursive): + self._path = path + self._is_recursive = recursive + + @property + def path(self): + """The path that this watch monitors.""" + return self._path + + @property + def is_recursive(self): + """Determines whether subdirectories are watched for the path.""" + return self._is_recursive + + @property + def key(self): + return self.path, self.is_recursive + + def __eq__(self, watch): + return self.key == watch.key + + def __ne__(self, watch): + return self.key != watch.key + + def __hash__(self): + return hash(self.key) + + def __repr__(self): + return "" % ( + self.path, self.is_recursive) + + +# Observer classes +class EventEmitter(BaseThread): + """ + Producer thread base class subclassed by event emitters + that generate events and populate a queue with them. + + :param event_queue: + The event queue to populate with generated events. + :type event_queue: + :class:`watchdog.events.EventQueue` + :param watch: + The watch to observe and produce events for. + :type watch: + :class:`ObservedWatch` + :param timeout: + Timeout (in seconds) between successive attempts at reading events. + :type timeout: + ``float`` + """ + + def __init__(self, event_queue, watch, timeout=DEFAULT_EMITTER_TIMEOUT): + BaseThread.__init__(self) + self._event_queue = event_queue + self._watch = watch + self._timeout = timeout + + @property + def timeout(self): + """ + Blocking timeout for reading events. + """ + return self._timeout + + @property + def watch(self): + """ + The watch associated with this emitter. + """ + return self._watch + + def queue_event(self, event): + """ + Queues a single event. + + :param event: + Event to be queued. + :type event: + An instance of :class:`watchdog.events.FileSystemEvent` + or a subclass. + """ + self._event_queue.put((event, self.watch)) + + def queue_events(self, timeout): + """Override this method to populate the event queue with events + per interval period. + + :param timeout: + Timeout (in seconds) between successive attempts at + reading events. + :type timeout: + ``float`` + """ + + def run(self): + try: + while self.should_keep_running(): + self.queue_events(self.timeout) + finally: + pass + + +class EventDispatcher(BaseThread): + """ + Consumer thread base class subclassed by event observer threads + that dispatch events from an event queue to appropriate event handlers. + + :param timeout: + Event queue blocking timeout (in seconds). + :type timeout: + ``float`` + """ + + def __init__(self, timeout=DEFAULT_OBSERVER_TIMEOUT): + BaseThread.__init__(self) + self._event_queue = EventQueue() + self._timeout = timeout + + @property + def timeout(self): + """Event queue block timeout.""" + return self._timeout + + @property + def event_queue(self): + """The event queue which is populated with file system events + by emitters and from which events are dispatched by a dispatcher + thread.""" + return self._event_queue + + def dispatch_events(self, event_queue, timeout): + """Override this method to consume events from an event queue, blocking + on the queue for the specified timeout before raising :class:`queue.Empty`. + + :param event_queue: + Event queue to populate with one set of events. + :type event_queue: + :class:`EventQueue` + :param timeout: + Interval period (in seconds) to wait before timing out on the + event queue. 
+ :type timeout: + ``float`` + :raises: + :class:`queue.Empty` + """ + + def run(self): + while self.should_keep_running(): + try: + self.dispatch_events(self.event_queue, self.timeout) + except queue.Empty: + continue + + +class BaseObserver(EventDispatcher): + """Base observer.""" + + def __init__(self, emitter_class, timeout=DEFAULT_OBSERVER_TIMEOUT): + EventDispatcher.__init__(self, timeout) + self._emitter_class = emitter_class + self._lock = threading.RLock() + self._watches = set() + self._handlers = dict() + self._emitters = set() + self._emitter_for_watch = dict() + + def _add_emitter(self, emitter): + self._emitter_for_watch[emitter.watch] = emitter + self._emitters.add(emitter) + + def _remove_emitter(self, emitter): + del self._emitter_for_watch[emitter.watch] + self._emitters.remove(emitter) + emitter.stop() + try: + emitter.join() + except RuntimeError: + pass + + def _clear_emitters(self): + for emitter in self._emitters: + emitter.stop() + for emitter in self._emitters: + try: + emitter.join() + except RuntimeError: + pass + self._emitters.clear() + self._emitter_for_watch.clear() + + def _add_handler_for_watch(self, event_handler, watch): + if watch not in self._handlers: + self._handlers[watch] = set() + self._handlers[watch].add(event_handler) + + def _remove_handlers_for_watch(self, watch): + del self._handlers[watch] + + @property + def emitters(self): + """Returns event emitter created by this observer.""" + return self._emitters + + def start(self): + for emitter in self._emitters: + emitter.start() + super(BaseObserver, self).start() + + def schedule(self, event_handler, path, recursive=False): + """ + Schedules watching a path and calls appropriate methods specified + in the given event handler in response to file system events. + + :param event_handler: + An event handler instance that has appropriate event handling + methods which will be called by the observer in response to + file system events. + :type event_handler: + :class:`watchdog.events.FileSystemEventHandler` or a subclass + :param path: + Directory path that will be monitored. + :type path: + ``str`` + :param recursive: + ``True`` if events will be emitted for sub-directories + traversed recursively; ``False`` otherwise. + :type recursive: + ``bool`` + :return: + An :class:`ObservedWatch` object instance representing + a watch. + """ + with self._lock: + watch = ObservedWatch(path, recursive) + self._add_handler_for_watch(event_handler, watch) + + # If we don't have an emitter for this watch already, create it. + if self._emitter_for_watch.get(watch) is None: + emitter = self._emitter_class(event_queue=self.event_queue, + watch=watch, + timeout=self.timeout) + self._add_emitter(emitter) + if self.is_alive(): + emitter.start() + self._watches.add(watch) + return watch + + def add_handler_for_watch(self, event_handler, watch): + """Adds a handler for the given watch. + + :param event_handler: + An event handler instance that has appropriate event handling + methods which will be called by the observer in response to + file system events. + :type event_handler: + :class:`watchdog.events.FileSystemEventHandler` or a subclass + :param watch: + The watch to add a handler for. + :type watch: + An instance of :class:`ObservedWatch` or a subclass of + :class:`ObservedWatch` + """ + with self._lock: + self._add_handler_for_watch(event_handler, watch) + + def remove_handler_for_watch(self, event_handler, watch): + """Removes a handler for the given watch. 
+ + :param event_handler: + An event handler instance that has appropriate event handling + methods which will be called by the observer in response to + file system events. + :type event_handler: + :class:`watchdog.events.FileSystemEventHandler` or a subclass + :param watch: + The watch to remove a handler for. + :type watch: + An instance of :class:`ObservedWatch` or a subclass of + :class:`ObservedWatch` + """ + with self._lock: + self._handlers[watch].remove(event_handler) + + def unschedule(self, watch): + """Unschedules a watch. + + :param watch: + The watch to unschedule. + :type watch: + An instance of :class:`ObservedWatch` or a subclass of + :class:`ObservedWatch` + """ + with self._lock: + emitter = self._emitter_for_watch[watch] + del self._handlers[watch] + self._remove_emitter(emitter) + self._watches.remove(watch) + + def unschedule_all(self): + """Unschedules all watches and detaches all associated event + handlers.""" + with self._lock: + self._handlers.clear() + self._clear_emitters() + self._watches.clear() + + def on_thread_stop(self): + self.unschedule_all() + + def dispatch_events(self, event_queue, timeout): + event, watch = event_queue.get(block=True, timeout=timeout) + + with self._lock: + # To allow unschedule/stop and safe removal of event handlers + # within event handlers itself, check if the handler is still + # registered after every dispatch. + for handler in list(self._handlers.get(watch, [])): + if handler in self._handlers.get(watch, []): + handler.dispatch(event) + event_queue.task_done() diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/observers/fsevents.py b/flask/venv/lib/python3.6/site-packages/watchdog/observers/fsevents.py new file mode 100644 index 0000000..5edfebe --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/watchdog/observers/fsevents.py @@ -0,0 +1,172 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2011 Yesudeep Mangalapilly +# Copyright 2012 Google, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +:module: watchdog.observers.fsevents +:synopsis: FSEvents based emitter implementation. +:author: yesudeep@google.com (Yesudeep Mangalapilly) +:platforms: Mac OS X +""" + +from __future__ import with_statement + +import sys +import threading +import unicodedata +import _watchdog_fsevents as _fsevents + +from watchdog.events import ( + FileDeletedEvent, + FileModifiedEvent, + FileCreatedEvent, + FileMovedEvent, + DirDeletedEvent, + DirModifiedEvent, + DirCreatedEvent, + DirMovedEvent +) + +from watchdog.utils.dirsnapshot import DirectorySnapshot +from watchdog.observers.api import ( + BaseObserver, + EventEmitter, + DEFAULT_EMITTER_TIMEOUT, + DEFAULT_OBSERVER_TIMEOUT +) + + +class FSEventsEmitter(EventEmitter): + + """ + Mac OS X FSEvents Emitter class. + + :param event_queue: + The event queue to fill with events. + :param watch: + A watch object representing the directory to monitor. 
+ :type watch: + :class:`watchdog.observers.api.ObservedWatch` + :param timeout: + Read events blocking timeout (in seconds). + :type timeout: + ``float`` + """ + + def __init__(self, event_queue, watch, timeout=DEFAULT_EMITTER_TIMEOUT): + EventEmitter.__init__(self, event_queue, watch, timeout) + self._lock = threading.Lock() + self.snapshot = DirectorySnapshot(watch.path, watch.is_recursive) + + def on_thread_stop(self): + _fsevents.remove_watch(self.watch) + _fsevents.stop(self) + + def queue_events(self, timeout): + with self._lock: + if not self.watch.is_recursive\ + and self.watch.path not in self.pathnames: + return + new_snapshot = DirectorySnapshot(self.watch.path, + self.watch.is_recursive) + events = new_snapshot - self.snapshot + self.snapshot = new_snapshot + + # Files. + for src_path in events.files_deleted: + self.queue_event(FileDeletedEvent(src_path)) + for src_path in events.files_modified: + self.queue_event(FileModifiedEvent(src_path)) + for src_path in events.files_created: + self.queue_event(FileCreatedEvent(src_path)) + for src_path, dest_path in events.files_moved: + self.queue_event(FileMovedEvent(src_path, dest_path)) + + # Directories. + for src_path in events.dirs_deleted: + self.queue_event(DirDeletedEvent(src_path)) + for src_path in events.dirs_modified: + self.queue_event(DirModifiedEvent(src_path)) + for src_path in events.dirs_created: + self.queue_event(DirCreatedEvent(src_path)) + for src_path, dest_path in events.dirs_moved: + self.queue_event(DirMovedEvent(src_path, dest_path)) + + def run(self): + try: + def callback(pathnames, flags, emitter=self): + emitter.queue_events(emitter.timeout) + + # for pathname, flag in zip(pathnames, flags): + # if emitter.watch.is_recursive: # and pathname != emitter.watch.path: + # new_sub_snapshot = DirectorySnapshot(pathname, True) + # old_sub_snapshot = self.snapshot.copy(pathname) + # diff = new_sub_snapshot - old_sub_snapshot + # self.snapshot += new_subsnapshot + # else: + # new_snapshot = DirectorySnapshot(emitter.watch.path, False) + # diff = new_snapshot - emitter.snapshot + # emitter.snapshot = new_snapshot + + # INFO: FSEvents reports directory notifications recursively + # by default, so we do not need to add subdirectory paths. + #pathnames = set([self.watch.path]) + # if self.watch.is_recursive: + # for root, directory_names, _ in os.walk(self.watch.path): + # for directory_name in directory_names: + # full_path = absolute_path( + # os.path.join(root, directory_name)) + # pathnames.add(full_path) + self.pathnames = [self.watch.path] + _fsevents.add_watch(self, + self.watch, + callback, + self.pathnames) + _fsevents.read_events(self) + except: + pass + + +class FSEventsObserver(BaseObserver): + + def __init__(self, timeout=DEFAULT_OBSERVER_TIMEOUT): + BaseObserver.__init__(self, emitter_class=FSEventsEmitter, + timeout=timeout) + + def schedule(self, event_handler, path, recursive=False): + # Python 2/3 compat + try: + str_class = unicode + except NameError: + str_class = str + + # Fix for issue #26: Trace/BPT error when given a unicode path + # string. https://github.com/gorakhargosh/watchdog/issues#issue/26 + if isinstance(path, str_class): + #path = unicode(path, 'utf-8') + path = unicodedata.normalize('NFC', path) + # We only encode the path in Python 2 for backwards compatibility. + # On Python 3 we want the path to stay as unicode if possible for + # the sake of path matching not having to be rewritten to use the + # bytes API instead of strings. 
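# --- Illustrative sketch: the FSEvents emitter above detects changes by
# diffing two DirectorySnapshot objects. The same technique stands on its own;
# the directory path is a hypothetical placeholder.
from watchdog.utils.dirsnapshot import DirectorySnapshot, DirectorySnapshotDiff

before = DirectorySnapshot("/tmp/project", True)
# ... file system activity happens here ...
after = DirectorySnapshot("/tmp/project", True)

diff = DirectorySnapshotDiff(before, after)
print("created: ", diff.files_created)
print("deleted: ", diff.files_deleted)
print("modified:", diff.files_modified)
print("moved:   ", diff.files_moved)     # list of (src, dest) pairs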
The _watchdog_fsevent.so code for + # Python 3 can handle both str and bytes paths, which is why we + # do not HAVE to encode it with Python 3. The Python 2 code in + # _watchdog_fsevents.so was not changed for the sake of backwards + # compatibility. + if sys.version_info < (3,): + path = path.encode('utf-8') + return BaseObserver.schedule(self, event_handler, path, recursive) diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/observers/fsevents2.py b/flask/venv/lib/python3.6/site-packages/watchdog/observers/fsevents2.py new file mode 100644 index 0000000..20bf064 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/watchdog/observers/fsevents2.py @@ -0,0 +1,239 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2014 Thomas Amland +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +:module: watchdog.observers.fsevents2 +:synopsis: FSEvents based emitter implementation. +:platforms: Mac OS X +""" + +import os +import logging +import unicodedata +from threading import Thread +from watchdog.utils.compat import queue + +from watchdog.events import ( + FileDeletedEvent, + FileModifiedEvent, + FileCreatedEvent, + FileMovedEvent, + DirDeletedEvent, + DirModifiedEvent, + DirCreatedEvent, + DirMovedEvent +) +from watchdog.observers.api import ( + BaseObserver, + EventEmitter, + DEFAULT_EMITTER_TIMEOUT, + DEFAULT_OBSERVER_TIMEOUT, +) + +# pyobjc +import AppKit +from FSEvents import ( + FSEventStreamCreate, + CFRunLoopGetCurrent, + FSEventStreamScheduleWithRunLoop, + FSEventStreamStart, + CFRunLoopRun, + CFRunLoopStop, + FSEventStreamStop, + FSEventStreamInvalidate, + FSEventStreamRelease, +) + +from FSEvents import ( + kCFAllocatorDefault, + kCFRunLoopDefaultMode, + kFSEventStreamEventIdSinceNow, + kFSEventStreamCreateFlagNoDefer, + kFSEventStreamCreateFlagFileEvents, + kFSEventStreamEventFlagItemCreated, + kFSEventStreamEventFlagItemRemoved, + kFSEventStreamEventFlagItemInodeMetaMod, + kFSEventStreamEventFlagItemRenamed, + kFSEventStreamEventFlagItemModified, + kFSEventStreamEventFlagItemFinderInfoMod, + kFSEventStreamEventFlagItemChangeOwner, + kFSEventStreamEventFlagItemXattrMod, + kFSEventStreamEventFlagItemIsDir, + kFSEventStreamEventFlagItemIsSymlink, +) + +logger = logging.getLogger(__name__) + + +class FSEventsQueue(Thread): + """ Low level FSEvents client. """ + + def __init__(self, path): + Thread.__init__(self) + self._queue = queue.Queue() + self._run_loop = None + + if isinstance(path, bytes): + path = path.decode('utf-8') + self._path = unicodedata.normalize('NFC', path) + + context = None + latency = 1.0 + self._stream_ref = FSEventStreamCreate( + kCFAllocatorDefault, self._callback, context, [self._path], + kFSEventStreamEventIdSinceNow, latency, + kFSEventStreamCreateFlagNoDefer | kFSEventStreamCreateFlagFileEvents) + if self._stream_ref is None: + raise IOError("FSEvents. 
Could not create stream.") + + def run(self): + pool = AppKit.NSAutoreleasePool.alloc().init() + self._run_loop = CFRunLoopGetCurrent() + FSEventStreamScheduleWithRunLoop( + self._stream_ref, self._run_loop, kCFRunLoopDefaultMode) + if not FSEventStreamStart(self._stream_ref): + FSEventStreamInvalidate(self._stream_ref) + FSEventStreamRelease(self._stream_ref) + raise IOError("FSEvents. Could not start stream.") + + CFRunLoopRun() + FSEventStreamStop(self._stream_ref) + FSEventStreamInvalidate(self._stream_ref) + FSEventStreamRelease(self._stream_ref) + del pool + # Make sure waiting thread is notified + self._queue.put(None) + + def stop(self): + if self._run_loop is not None: + CFRunLoopStop(self._run_loop) + + def _callback(self, streamRef, clientCallBackInfo, numEvents, eventPaths, eventFlags, eventIDs): + events = [NativeEvent(path, flags, _id) for path, flags, _id in + zip(eventPaths, eventFlags, eventIDs)] + logger.debug("FSEvents callback. Got %d events:" % numEvents) + for e in events: + logger.debug(e) + self._queue.put(events) + + def read_events(self): + """ + Returns a list or one or more events, or None if there are no more + events to be read. + """ + if not self.is_alive(): + return None + return self._queue.get() + + +class NativeEvent(object): + def __init__(self, path, flags, event_id): + self.path = path + self.flags = flags + self.event_id = event_id + self.is_created = bool(flags & kFSEventStreamEventFlagItemCreated) + self.is_removed = bool(flags & kFSEventStreamEventFlagItemRemoved) + self.is_renamed = bool(flags & kFSEventStreamEventFlagItemRenamed) + self.is_modified = bool(flags & kFSEventStreamEventFlagItemModified) + self.is_change_owner = bool(flags & kFSEventStreamEventFlagItemChangeOwner) + self.is_inode_meta_mod = bool(flags & kFSEventStreamEventFlagItemInodeMetaMod) + self.is_finder_info_mod = bool(flags & kFSEventStreamEventFlagItemFinderInfoMod) + self.is_xattr_mod = bool(flags & kFSEventStreamEventFlagItemXattrMod) + self.is_symlink = bool(flags & kFSEventStreamEventFlagItemIsSymlink) + self.is_directory = bool(flags & kFSEventStreamEventFlagItemIsDir) + + @property + def _event_type(self): + if self.is_created: return "Created" + if self.is_removed: return "Removed" + if self.is_renamed: return "Renamed" + if self.is_modified: return "Modified" + if self.is_inode_meta_mod: return "InodeMetaMod" + if self.is_xattr_mod: return "XattrMod" + return "Unknown" + + def __repr__(self): + s ="" + return s % (repr(self.path), self._event_type, self.is_directory, hex(self.flags), self.event_id) + + +class FSEventsEmitter(EventEmitter): + """ + FSEvents based event emitter. Handles conversion of native events. + """ + + def __init__(self, event_queue, watch, timeout=DEFAULT_EMITTER_TIMEOUT): + EventEmitter.__init__(self, event_queue, watch, timeout) + self._fsevents = FSEventsQueue(watch.path) + self._fsevents.start() + + def on_thread_stop(self): + self._fsevents.stop() + + def queue_events(self, timeout): + events = self._fsevents.read_events() + if events is None: + return + i = 0 + while i < len(events): + event = events[i] + + # For some reason the create and remove flags are sometimes also + # set for rename and modify type events, so let those take + # precedence. + if event.is_renamed: + # Internal moves appears to always be consecutive in the same + # buffer and have IDs differ by exactly one (while others + # don't) making it possible to pair up the two events coming + # from a singe move operation. (None of this is documented!) 
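# --- Illustrative sketch: FSEventsQueue above hands event batches from the
# CFRunLoop thread to read_events() through a plain queue and signals shutdown
# by enqueueing None. The same sentinel pattern in miniature:
import queue
import threading

events_queue = queue.Queue()


def producer():
    for batch in (["event-1"], ["event-2", "event-3"]):
        events_queue.put(batch)
    events_queue.put(None)            # sentinel: no more events will follow


threading.Thread(target=producer).start()

while True:
    batch = events_queue.get()
    if batch is None:                 # producer has shut down
        break
    for event in batch:
        print(event)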
+ # Otherwise, guess whether file was moved in or out. + #TODO: handle id wrapping + if (i+1 < len(events) and events[i+1].is_renamed and + events[i+1].event_id == event.event_id + 1): + cls = DirMovedEvent if event.is_directory else FileMovedEvent + self.queue_event(cls(event.path, events[i+1].path)) + self.queue_event(DirModifiedEvent(os.path.dirname(event.path))) + self.queue_event(DirModifiedEvent(os.path.dirname(events[i+1].path))) + i += 1 + elif os.path.exists(event.path): + cls = DirCreatedEvent if event.is_directory else FileCreatedEvent + self.queue_event(cls(event.path)) + self.queue_event(DirModifiedEvent(os.path.dirname(event.path))) + else: + cls = DirDeletedEvent if event.is_directory else FileDeletedEvent + self.queue_event(cls(event.path)) + self.queue_event(DirModifiedEvent(os.path.dirname(event.path))) + #TODO: generate events for tree + + elif event.is_modified or event.is_inode_meta_mod or event.is_xattr_mod : + cls = DirModifiedEvent if event.is_directory else FileModifiedEvent + self.queue_event(cls(event.path)) + + elif event.is_created: + cls = DirCreatedEvent if event.is_directory else FileCreatedEvent + self.queue_event(cls(event.path)) + self.queue_event(DirModifiedEvent(os.path.dirname(event.path))) + + elif event.is_removed: + cls = DirDeletedEvent if event.is_directory else FileDeletedEvent + self.queue_event(cls(event.path)) + self.queue_event(DirModifiedEvent(os.path.dirname(event.path))) + i += 1 + + +class FSEventsObserver2(BaseObserver): + def __init__(self, timeout=DEFAULT_OBSERVER_TIMEOUT): + BaseObserver.__init__(self, emitter_class=FSEventsEmitter, timeout=timeout) diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/observers/inotify.py b/flask/venv/lib/python3.6/site-packages/watchdog/observers/inotify.py new file mode 100644 index 0000000..e6de857 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/watchdog/observers/inotify.py @@ -0,0 +1,218 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2011 Yesudeep Mangalapilly +# Copyright 2012 Google, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +:module: watchdog.observers.inotify +:synopsis: ``inotify(7)`` based emitter implementation. +:author: Sebastien Martini +:author: Luke McCarthy +:author: yesudeep@google.com (Yesudeep Mangalapilly) +:author: Tim Cuthbertson +:platforms: Linux 2.6.13+. + +.. ADMONITION:: About system requirements + + Recommended minimum kernel version: 2.6.25. + + Quote from the inotify(7) man page: + + "Inotify was merged into the 2.6.13 Linux kernel. The required library + interfaces were added to glibc in version 2.4. (IN_DONT_FOLLOW, + IN_MASK_ADD, and IN_ONLYDIR were only added in version 2.5.)" + + Therefore, you must ensure the system is running at least these versions + appropriate libraries and the kernel. + +.. 
ADMONITION:: About recursiveness, event order, and event coalescing + + Quote from the inotify(7) man page: + + If successive output inotify events produced on the inotify file + descriptor are identical (same wd, mask, cookie, and name) then they + are coalesced into a single event if the older event has not yet been + read (but see BUGS). + + The events returned by reading from an inotify file descriptor form + an ordered queue. Thus, for example, it is guaranteed that when + renaming from one directory to another, events will be produced in + the correct order on the inotify file descriptor. + + ... + + Inotify monitoring of directories is not recursive: to monitor + subdirectories under a directory, additional watches must be created. + + This emitter implementation therefore automatically adds watches for + sub-directories if running in recursive mode. + +Some extremely useful articles and documentation: + +.. _inotify FAQ: http://inotify.aiken.cz/?section=inotify&page=faq&lang=en +.. _intro to inotify: http://www.linuxjournal.com/article/8478 + +""" + +from __future__ import with_statement + +import os +import threading +from .inotify_buffer import InotifyBuffer + +from watchdog.observers.api import ( + EventEmitter, + BaseObserver, + DEFAULT_EMITTER_TIMEOUT, + DEFAULT_OBSERVER_TIMEOUT +) + +from watchdog.events import ( + DirDeletedEvent, + DirModifiedEvent, + DirMovedEvent, + DirCreatedEvent, + FileDeletedEvent, + FileModifiedEvent, + FileMovedEvent, + FileCreatedEvent, + generate_sub_moved_events, + generate_sub_created_events, +) +from watchdog.utils import unicode_paths + + +class InotifyEmitter(EventEmitter): + """ + inotify(7)-based event emitter. + + :param event_queue: + The event queue to fill with events. + :param watch: + A watch object representing the directory to monitor. + :type watch: + :class:`watchdog.observers.api.ObservedWatch` + :param timeout: + Read events blocking timeout (in seconds). 
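# --- Illustrative sketch: as the admonition above explains, inotify watches
# are per-directory, so recursive monitoring needs one watch per subdirectory.
# A quick way to estimate how many watches a tree will consume (compare the
# result against /proc/sys/fs/inotify/max_user_watches on Linux); the root
# path is a hypothetical placeholder.
import os


def count_required_watches(root):
    total = 1                           # one watch for the root itself
    for _path, dirnames, _files in os.walk(root):
        total += len(dirnames)          # plus one per subdirectory
    return total


print(count_required_watches("/tmp"))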
+ :type timeout: + ``float`` + """ + + def __init__(self, event_queue, watch, timeout=DEFAULT_EMITTER_TIMEOUT): + EventEmitter.__init__(self, event_queue, watch, timeout) + self._lock = threading.Lock() + self._inotify = None + + def on_thread_start(self): + path = unicode_paths.encode(self.watch.path) + self._inotify = InotifyBuffer(path, self.watch.is_recursive) + + def on_thread_stop(self): + if self._inotify: + self._inotify.close() + + def queue_events(self, timeout, full_events=False): + #If "full_events" is true, then the method will report unmatched move events as seperate events + #This behavior is by default only called by a InotifyFullEmitter + with self._lock: + event = self._inotify.read_event() + if event is None: + return + if isinstance(event, tuple): + move_from, move_to = event + src_path = self._decode_path(move_from.src_path) + dest_path = self._decode_path(move_to.src_path) + cls = DirMovedEvent if move_from.is_directory else FileMovedEvent + self.queue_event(cls(src_path, dest_path)) + self.queue_event(DirModifiedEvent(os.path.dirname(src_path))) + self.queue_event(DirModifiedEvent(os.path.dirname(dest_path))) + if move_from.is_directory and self.watch.is_recursive: + for sub_event in generate_sub_moved_events(src_path, dest_path): + self.queue_event(sub_event) + return + + src_path = self._decode_path(event.src_path) + if event.is_moved_to: + if (full_events): + cls = DirMovedEvent if event.is_directory else FileMovedEvent + self.queue_event(cls(None, src_path)) + else: + cls = DirCreatedEvent if event.is_directory else FileCreatedEvent + self.queue_event(cls(src_path)) + self.queue_event(DirModifiedEvent(os.path.dirname(src_path))) + if event.is_directory and self.watch.is_recursive: + for sub_event in generate_sub_created_events(src_path): + self.queue_event(sub_event) + elif event.is_attrib: + cls = DirModifiedEvent if event.is_directory else FileModifiedEvent + self.queue_event(cls(src_path)) + elif event.is_modify: + cls = DirModifiedEvent if event.is_directory else FileModifiedEvent + self.queue_event(cls(src_path)) + elif event.is_delete or (event.is_moved_from and not full_events): + cls = DirDeletedEvent if event.is_directory else FileDeletedEvent + self.queue_event(cls(src_path)) + self.queue_event(DirModifiedEvent(os.path.dirname(src_path))) + elif event.is_moved_from and full_events: + cls = DirMovedEvent if event.is_directory else FileMovedEvent + self.queue_event(cls(src_path, None)) + self.queue_event(DirModifiedEvent(os.path.dirname(src_path))) + elif event.is_create: + cls = DirCreatedEvent if event.is_directory else FileCreatedEvent + self.queue_event(cls(src_path)) + self.queue_event(DirModifiedEvent(os.path.dirname(src_path))) + + def _decode_path(self, path): + """ Decode path only if unicode string was passed to this emitter. """ + if isinstance(self.watch.path, bytes): + return path + return unicode_paths.decode(path) + + +class InotifyFullEmitter(InotifyEmitter): + """ + inotify(7)-based event emitter. By default this class produces move events even if they are not matched + Such move events will have a ``None`` value for the unmatched part. + + :param event_queue: + The event queue to fill with events. + :param watch: + A watch object representing the directory to monitor. + :type watch: + :class:`watchdog.observers.api.ObservedWatch` + :param timeout: + Read events blocking timeout (in seconds). 
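# --- Illustrative sketch: the branches in queue_events() above fan inotify
# notifications out into watchdog's created/deleted/modified/moved event
# classes; on the consumer side they arrive as separate handler callbacks.
from watchdog.events import FileSystemEventHandler


class AuditHandler(FileSystemEventHandler):
    def on_created(self, event):
        print("created ", event.src_path)

    def on_deleted(self, event):
        print("deleted ", event.src_path)

    def on_modified(self, event):
        print("modified", event.src_path)

    def on_moved(self, event):
        print("moved   ", event.src_path, "->", event.dest_path)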
+ :type timeout: + ``float`` + """ + def __init__(self, event_queue, watch, timeout=DEFAULT_EMITTER_TIMEOUT): + InotifyEmitter.__init__(self, event_queue, watch, timeout) + + def queue_events(self, timeout, events=True): + InotifyEmitter.queue_events(self, timeout, full_events=events) + +class InotifyObserver(BaseObserver): + """ + Observer thread that schedules watching directories and dispatches + calls to event handlers. + """ + + def __init__(self, timeout=DEFAULT_OBSERVER_TIMEOUT, generate_full_events=False): + if (generate_full_events): + BaseObserver.__init__(self, emitter_class=InotifyFullEmitter, timeout=timeout) + else: + BaseObserver.__init__(self, emitter_class=InotifyEmitter, + timeout=timeout) diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/observers/inotify_buffer.py b/flask/venv/lib/python3.6/site-packages/watchdog/observers/inotify_buffer.py new file mode 100644 index 0000000..dce2ae1 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/watchdog/observers/inotify_buffer.py @@ -0,0 +1,81 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2014 Thomas Amland +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +from watchdog.utils import BaseThread +from watchdog.utils.delayed_queue import DelayedQueue +from watchdog.observers.inotify_c import Inotify + +logger = logging.getLogger(__name__) + + +class InotifyBuffer(BaseThread): + """A wrapper for `Inotify` that holds events for `delay` seconds. During + this time, IN_MOVED_FROM and IN_MOVED_TO events are paired. + """ + + delay = 0.5 + + def __init__(self, path, recursive=False): + BaseThread.__init__(self) + self._queue = DelayedQueue(self.delay) + self._inotify = Inotify(path, recursive) + self.start() + + def read_event(self): + """Returns a single event or a tuple of from/to events in case of a + paired move event. If this buffer has been closed, immediately return + None. + """ + return self._queue.get() + + def on_thread_stop(self): + self._inotify.close() + self._queue.close() + + def close(self): + self.stop() + self.join() + + def run(self): + """Read event from `inotify` and add them to `queue`. When reading a + IN_MOVE_TO event, remove the previous added matching IN_MOVE_FROM event + and add them back to the queue as a tuple. 
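# --- Illustrative sketch (Linux only): InotifyBuffer.delay below is a class
# attribute, so the 0.5 s move-pairing window can be tuned globally before any
# observer is created. A shorter window lowers latency but raises the chance
# that one rename is reported as an unrelated delete plus create.
from watchdog.observers.inotify import InotifyObserver
from watchdog.observers.inotify_buffer import InotifyBuffer

InotifyBuffer.delay = 0.1      # pair IN_MOVED_FROM/IN_MOVED_TO within 100 ms
observer = InotifyObserver()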
+ """ + deleted_self = False + while self.should_keep_running() and not deleted_self: + inotify_events = self._inotify.read_events() + for inotify_event in inotify_events: + logger.debug("in-event %s", inotify_event) + if inotify_event.is_moved_to: + + def matching_from_event(event): + return (not isinstance(event, tuple) and event.is_moved_from + and event.cookie == inotify_event.cookie) + + from_event = self._queue.remove(matching_from_event) + if from_event is not None: + self._queue.put((from_event, inotify_event)) + else: + logger.debug("could not find matching move_from event") + self._queue.put(inotify_event) + else: + self._queue.put(inotify_event) + + if inotify_event.is_delete_self and \ + inotify_event.src_path == self._inotify.path: + # Deleted the watched directory, stop watching for events + deleted_self = True diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/observers/inotify_c.py b/flask/venv/lib/python3.6/site-packages/watchdog/observers/inotify_c.py new file mode 100644 index 0000000..0dc7b50 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/watchdog/observers/inotify_c.py @@ -0,0 +1,575 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Yesudeep Mangalapilly +# Copyright 2012 Google, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import with_statement +import os +import errno +import struct +import threading +import ctypes +import ctypes.util +from functools import reduce +from ctypes import c_int, c_char_p, c_uint32 +from watchdog.utils import has_attribute +from watchdog.utils import UnsupportedLibc + + +def _load_libc(): + libc_path = None + try: + libc_path = ctypes.util.find_library('c') + except (OSError, IOError, RuntimeError): + # Note: find_library will on some platforms raise these undocumented + # errors, e.g.on android IOError "No usable temporary directory found" + # will be raised. + pass + + if libc_path is not None: + return ctypes.CDLL(libc_path) + + # Fallbacks + try: + return ctypes.CDLL('libc.so') + except (OSError, IOError): + pass + + try: + return ctypes.CDLL('libc.so.6') + except (OSError, IOError): + pass + + # uClibc + try: + return ctypes.CDLL('libc.so.0') + except (OSError, IOError) as err: + raise err + + +libc = _load_libc() + +if not has_attribute(libc, 'inotify_init') or \ + not has_attribute(libc, 'inotify_add_watch') or \ + not has_attribute(libc, 'inotify_rm_watch'): + raise UnsupportedLibc("Unsupported libc version found: %s" % libc._name) + +inotify_add_watch = ctypes.CFUNCTYPE(c_int, c_int, c_char_p, c_uint32, use_errno=True)( + ("inotify_add_watch", libc)) + +inotify_rm_watch = ctypes.CFUNCTYPE(c_int, c_int, c_uint32, use_errno=True)( + ("inotify_rm_watch", libc)) + +inotify_init = ctypes.CFUNCTYPE(c_int, use_errno=True)( + ("inotify_init", libc)) + + +class InotifyConstants(object): + # User-space events + IN_ACCESS = 0x00000001 # File was accessed. + IN_MODIFY = 0x00000002 # File was modified. + IN_ATTRIB = 0x00000004 # Meta-data changed. 
+ IN_CLOSE_WRITE = 0x00000008 # Writable file was closed. + IN_CLOSE_NOWRITE = 0x00000010 # Unwritable file closed. + IN_OPEN = 0x00000020 # File was opened. + IN_MOVED_FROM = 0x00000040 # File was moved from X. + IN_MOVED_TO = 0x00000080 # File was moved to Y. + IN_CREATE = 0x00000100 # Subfile was created. + IN_DELETE = 0x00000200 # Subfile was deleted. + IN_DELETE_SELF = 0x00000400 # Self was deleted. + IN_MOVE_SELF = 0x00000800 # Self was moved. + + # Helper user-space events. + IN_CLOSE = IN_CLOSE_WRITE | IN_CLOSE_NOWRITE # Close. + IN_MOVE = IN_MOVED_FROM | IN_MOVED_TO # Moves. + + # Events sent by the kernel to a watch. + IN_UNMOUNT = 0x00002000 # Backing file system was unmounted. + IN_Q_OVERFLOW = 0x00004000 # Event queued overflowed. + IN_IGNORED = 0x00008000 # File was ignored. + + # Special flags. + IN_ONLYDIR = 0x01000000 # Only watch the path if it's a directory. + IN_DONT_FOLLOW = 0x02000000 # Do not follow a symbolic link. + IN_EXCL_UNLINK = 0x04000000 # Exclude events on unlinked objects + IN_MASK_ADD = 0x20000000 # Add to the mask of an existing watch. + IN_ISDIR = 0x40000000 # Event occurred against directory. + IN_ONESHOT = 0x80000000 # Only send event once. + + # All user-space events. + IN_ALL_EVENTS = reduce( + lambda x, y: x | y, [ + IN_ACCESS, + IN_MODIFY, + IN_ATTRIB, + IN_CLOSE_WRITE, + IN_CLOSE_NOWRITE, + IN_OPEN, + IN_MOVED_FROM, + IN_MOVED_TO, + IN_DELETE, + IN_CREATE, + IN_DELETE_SELF, + IN_MOVE_SELF, + ]) + + # Flags for ``inotify_init1`` + IN_CLOEXEC = 0x02000000 + IN_NONBLOCK = 0x00004000 + + +# Watchdog's API cares only about these events. +WATCHDOG_ALL_EVENTS = reduce( + lambda x, y: x | y, [ + InotifyConstants.IN_MODIFY, + InotifyConstants.IN_ATTRIB, + InotifyConstants.IN_MOVED_FROM, + InotifyConstants.IN_MOVED_TO, + InotifyConstants.IN_CREATE, + InotifyConstants.IN_DELETE, + InotifyConstants.IN_DELETE_SELF, + InotifyConstants.IN_DONT_FOLLOW, + ]) + + +class inotify_event_struct(ctypes.Structure): + """ + Structure representation of the inotify_event structure + (used in buffer size calculations):: + + struct inotify_event { + __s32 wd; /* watch descriptor */ + __u32 mask; /* watch mask */ + __u32 cookie; /* cookie to synchronize two events */ + __u32 len; /* length (including nulls) of name */ + char name[0]; /* stub for possible name */ + }; + """ + _fields_ = [('wd', c_int), + ('mask', c_uint32), + ('cookie', c_uint32), + ('len', c_uint32), + ('name', c_char_p)] + + +EVENT_SIZE = ctypes.sizeof(inotify_event_struct) +DEFAULT_NUM_EVENTS = 2048 +DEFAULT_EVENT_BUFFER_SIZE = DEFAULT_NUM_EVENTS * (EVENT_SIZE + 16) + + +class Inotify(object): + """ + Linux inotify(7) API wrapper class. + + :param path: + The directory path for which we want an inotify object. + :type path: + :class:`bytes` + :param recursive: + ``True`` if subdirectories should be monitored; ``False`` otherwise. + """ + + def __init__(self, path, recursive=False, event_mask=WATCHDOG_ALL_EVENTS): + # The file descriptor associated with the inotify instance. + inotify_fd = inotify_init() + if inotify_fd == -1: + Inotify._raise_error() + self._inotify_fd = inotify_fd + self._lock = threading.Lock() + + # Stores the watch descriptor for a given path. 
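# --- Illustrative sketch: the InotifyConstants values above are plain bit
# flags; masks are combined with bitwise OR and inspected with bitwise AND,
# exactly as WATCHDOG_ALL_EVENTS and the InotifyEvent.is_* properties do.
# Using values defined above:
IN_CREATE = 0x00000100
IN_DELETE = 0x00000200
IN_ISDIR = 0x40000000

mask = IN_CREATE | IN_ISDIR           # "a subdirectory was created"
print(bool(mask & IN_CREATE))         # True
print(bool(mask & IN_DELETE))         # False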
+ self._wd_for_path = dict() + self._path_for_wd = dict() + + self._path = path + self._event_mask = event_mask + self._is_recursive = recursive + self._add_dir_watch(path, recursive, event_mask) + self._moved_from_events = dict() + + @property + def event_mask(self): + """The event mask for this inotify instance.""" + return self._event_mask + + @property + def path(self): + """The path associated with the inotify instance.""" + return self._path + + @property + def is_recursive(self): + """Whether we are watching directories recursively.""" + return self._is_recursive + + @property + def fd(self): + """The file descriptor associated with the inotify instance.""" + return self._inotify_fd + + def clear_move_records(self): + """Clear cached records of MOVED_FROM events""" + self._moved_from_events = dict() + + def source_for_move(self, destination_event): + """ + The source path corresponding to the given MOVED_TO event. + + If the source path is outside the monitored directories, None + is returned instead. + """ + if destination_event.cookie in self._moved_from_events: + return self._moved_from_events[destination_event.cookie].src_path + else: + return None + + def remember_move_from_event(self, event): + """ + Save this event as the source event for future MOVED_TO events to + reference. + """ + self._moved_from_events[event.cookie] = event + + def add_watch(self, path): + """ + Adds a watch for the given path. + + :param path: + Path to begin monitoring. + """ + with self._lock: + self._add_watch(path, self._event_mask) + + def remove_watch(self, path): + """ + Removes a watch for the given path. + + :param path: + Path string for which the watch will be removed. + """ + with self._lock: + wd = self._wd_for_path.pop(path) + del self._path_for_wd[wd] + if inotify_rm_watch(self._inotify_fd, wd) == -1: + Inotify._raise_error() + + def close(self): + """ + Closes the inotify instance and removes all associated watches. + """ + with self._lock: + if self._path in self._wd_for_path: + wd = self._wd_for_path[self._path] + inotify_rm_watch(self._inotify_fd, wd) + os.close(self._inotify_fd) + + def read_events(self, event_buffer_size=DEFAULT_EVENT_BUFFER_SIZE): + """ + Reads events from inotify and yields them. + """ + # HACK: We need to traverse the directory path + # recursively and simulate events for newly + # created subdirectories/files. 
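# --- Illustrative sketch (Linux only): direct use of the low-level Inotify
# wrapper defined above. The watched path is a hypothetical placeholder and
# must exist; note that the wrapper expects a bytes path, matching the ctypes
# prototypes.
from watchdog.observers.inotify_c import Inotify

watcher = Inotify(b"/tmp/watched", recursive=False)
try:
    for event in watcher.read_events():       # blocks until activity occurs
        print(event.src_path, hex(event.mask))
finally:
    watcher.close()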
This will handle + # mkdir -p foobar/blah/bar; touch foobar/afile + + def _recursive_simulate(src_path): + events = [] + for root, dirnames, filenames in os.walk(src_path): + for dirname in dirnames: + try: + full_path = os.path.join(root, dirname) + wd_dir = self._add_watch(full_path, self._event_mask) + e = InotifyEvent( + wd_dir, InotifyConstants.IN_CREATE | InotifyConstants.IN_ISDIR, 0, dirname, full_path) + events.append(e) + except OSError: + pass + for filename in filenames: + full_path = os.path.join(root, filename) + wd_parent_dir = self._wd_for_path[os.path.dirname(full_path)] + e = InotifyEvent( + wd_parent_dir, InotifyConstants.IN_CREATE, 0, filename, full_path) + events.append(e) + return events + + event_buffer = None + while True: + try: + event_buffer = os.read(self._inotify_fd, event_buffer_size) + except OSError as e: + if e.errno == errno.EINTR: + continue + break + + with self._lock: + event_list = [] + for wd, mask, cookie, name in Inotify._parse_event_buffer(event_buffer): + if wd == -1: + continue + wd_path = self._path_for_wd[wd] + src_path = os.path.join(wd_path, name) if name else wd_path #avoid trailing slash + inotify_event = InotifyEvent(wd, mask, cookie, name, src_path) + + if inotify_event.is_moved_from: + self.remember_move_from_event(inotify_event) + elif inotify_event.is_moved_to: + move_src_path = self.source_for_move(inotify_event) + if move_src_path in self._wd_for_path: + moved_wd = self._wd_for_path[move_src_path] + del self._wd_for_path[move_src_path] + self._wd_for_path[inotify_event.src_path] = moved_wd + self._path_for_wd[moved_wd] = inotify_event.src_path + src_path = os.path.join(wd_path, name) + inotify_event = InotifyEvent(wd, mask, cookie, name, src_path) + + if inotify_event.is_ignored: + # Clean up book-keeping for deleted watches. + path = self._path_for_wd.pop(wd) + if self._wd_for_path[path] == wd: + del self._wd_for_path[path] + continue + + event_list.append(inotify_event) + + if (self.is_recursive and inotify_event.is_directory and + inotify_event.is_create): + + # TODO: When a directory from another part of the + # filesystem is moved into a watched directory, this + # will not generate events for the directory tree. + # We need to coalesce IN_MOVED_TO events and those + # IN_MOVED_TO events which don't pair up with + # IN_MOVED_FROM events should be marked IN_CREATE + # instead relative to this directory. + try: + self._add_watch(src_path, self._event_mask) + except OSError: + continue + + event_list.extend(_recursive_simulate(src_path)) + + return event_list + + # Non-synchronized methods. + def _add_dir_watch(self, path, recursive, mask): + """ + Adds a watch (optionally recursively) for the given directory path + to monitor events specified by the mask. + + :param path: + Path to monitor + :param recursive: + ``True`` to monitor recursively. + :param mask: + Event bit mask. + """ + if not os.path.isdir(path): + raise OSError('Path is not a directory') + self._add_watch(path, mask) + if recursive: + for root, dirnames, _ in os.walk(path): + for dirname in dirnames: + full_path = os.path.join(root, dirname) + if os.path.islink(full_path): + continue + self._add_watch(full_path, mask) + + def _add_watch(self, path, mask): + """ + Adds a watch for the given path to monitor events specified by the + mask. + + :param path: + Path to monitor + :param mask: + Event bit mask. 
+ """ + wd = inotify_add_watch(self._inotify_fd, path, mask) + if wd == -1: + Inotify._raise_error() + self._wd_for_path[path] = wd + self._path_for_wd[wd] = path + return wd + + @staticmethod + def _raise_error(): + """ + Raises errors for inotify failures. + """ + err = ctypes.get_errno() + if err == errno.ENOSPC: + raise OSError("inotify watch limit reached") + elif err == errno.EMFILE: + raise OSError("inotify instance limit reached") + else: + raise OSError(os.strerror(err)) + + @staticmethod + def _parse_event_buffer(event_buffer): + """ + Parses an event buffer of ``inotify_event`` structs returned by + inotify:: + + struct inotify_event { + __s32 wd; /* watch descriptor */ + __u32 mask; /* watch mask */ + __u32 cookie; /* cookie to synchronize two events */ + __u32 len; /* length (including nulls) of name */ + char name[0]; /* stub for possible name */ + }; + + The ``cookie`` member of this struct is used to pair two related + events, for example, it pairs an IN_MOVED_FROM event with an + IN_MOVED_TO event. + """ + i = 0 + while i + 16 <= len(event_buffer): + wd, mask, cookie, length = struct.unpack_from('iIII', event_buffer, i) + name = event_buffer[i + 16:i + 16 + length].rstrip(b'\0') + i += 16 + length + yield wd, mask, cookie, name + + +class InotifyEvent(object): + """ + Inotify event struct wrapper. + + :param wd: + Watch descriptor + :param mask: + Event mask + :param cookie: + Event cookie + :param name: + Event name. + :param src_path: + Event source path + """ + + def __init__(self, wd, mask, cookie, name, src_path): + self._wd = wd + self._mask = mask + self._cookie = cookie + self._name = name + self._src_path = src_path + + @property + def src_path(self): + return self._src_path + + @property + def wd(self): + return self._wd + + @property + def mask(self): + return self._mask + + @property + def cookie(self): + return self._cookie + + @property + def name(self): + return self._name + + @property + def is_modify(self): + return self._mask & InotifyConstants.IN_MODIFY > 0 + + @property + def is_close_write(self): + return self._mask & InotifyConstants.IN_CLOSE_WRITE > 0 + + @property + def is_close_nowrite(self): + return self._mask & InotifyConstants.IN_CLOSE_NOWRITE > 0 + + @property + def is_access(self): + return self._mask & InotifyConstants.IN_ACCESS > 0 + + @property + def is_delete(self): + return self._mask & InotifyConstants.IN_DELETE > 0 + + @property + def is_delete_self(self): + return self._mask & InotifyConstants.IN_DELETE_SELF > 0 + + @property + def is_create(self): + return self._mask & InotifyConstants.IN_CREATE > 0 + + @property + def is_moved_from(self): + return self._mask & InotifyConstants.IN_MOVED_FROM > 0 + + @property + def is_moved_to(self): + return self._mask & InotifyConstants.IN_MOVED_TO > 0 + + @property + def is_move(self): + return self._mask & InotifyConstants.IN_MOVE > 0 + + @property + def is_move_self(self): + return self._mask & InotifyConstants.IN_MOVE_SELF > 0 + + @property + def is_attrib(self): + return self._mask & InotifyConstants.IN_ATTRIB > 0 + + @property + def is_ignored(self): + return self._mask & InotifyConstants.IN_IGNORED > 0 + + @property + def is_directory(self): + # It looks like the kernel does not provide this information for + # IN_DELETE_SELF and IN_MOVE_SELF. In this case, assume it's a dir. 
+ # See also: https://github.com/seb-m/pyinotify/blob/2c7e8f8/python2/pyinotify.py#L897 + return (self.is_delete_self or self.is_move_self or + self._mask & InotifyConstants.IN_ISDIR > 0) + + @property + def key(self): + return self._src_path, self._wd, self._mask, self._cookie, self._name + + def __eq__(self, inotify_event): + return self.key == inotify_event.key + + def __ne__(self, inotify_event): + return self.key == inotify_event.key + + def __hash__(self): + return hash(self.key) + + @staticmethod + def _get_mask_string(mask): + masks = [] + for c in dir(InotifyConstants): + if c.startswith('IN_') and c not in ['IN_ALL_EVENTS', 'IN_CLOSE', 'IN_MOVE']: + c_val = getattr(InotifyConstants, c) + if mask & c_val: + masks.append(c) + mask_string = '|'.join(masks) + return mask_string + + def __repr__(self): + mask_string = self._get_mask_string(self.mask) + s = "" + return s % (self.src_path, self.wd, mask_string, self.cookie, self.name) diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/observers/kqueue.py b/flask/venv/lib/python3.6/site-packages/watchdog/observers/kqueue.py new file mode 100644 index 0000000..9ace923 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/watchdog/observers/kqueue.py @@ -0,0 +1,726 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2011 Yesudeep Mangalapilly +# Copyright 2012 Google, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +:module: watchdog.observers.kqueue +:synopsis: ``kqueue(2)`` based emitter implementation. +:author: yesudeep@google.com (Yesudeep Mangalapilly) +:platforms: Mac OS X and BSD with kqueue(2). + +.. WARNING:: kqueue is a very heavyweight way to monitor file systems. + Each kqueue-detected directory modification triggers + a full directory scan. Traversing the entire directory tree + and opening file descriptors for all files will create + performance problems. We need to find a way to re-scan + only those directories which report changes and do a diff + between two sub-DirectorySnapshots perhaps. + +.. ADMONITION:: About ``select.kqueue`` and Python versions + + * Python 2.5 does not ship with ``select.kqueue`` + * Python 2.6 ships with a broken ``select.kqueue`` that cannot take + multiple events in the event list passed to ``kqueue.control``. + * Python 2.7 ships with a working ``select.kqueue`` + implementation. + + I have backported the Python 2.7 implementation to Python 2.5 and 2.6 + in the ``select_backport`` package available on PyPI. + +.. ADMONITION:: About OS X performance guidelines + + Quote from the `Mac OS X File System Performance Guidelines`_: + + "When you only want to track changes on a file or directory, be sure to + open it using the ``O_EVTONLY`` flag. This flag prevents the file or + directory from being marked as open or in use. This is important + if you are tracking files on a removable volume and the user tries to + unmount the volume. With this flag in place, the system knows it can + dismiss the volume. 
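# --- Illustrative sketch: InotifyEvent.__ne__ above repeats the equality test
# instead of negating it, so inequality comparisons mirror __eq__. A key-based
# identity trio normally looks like this:
class Keyed(object):
    def __init__(self, key):
        self.key = key

    def __eq__(self, other):
        return self.key == other.key

    def __ne__(self, other):
        return self.key != other.key      # negated, unlike the code above

    def __hash__(self):
        return hash(self.key)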
If you had opened the files or directories without + this flag, the volume would be marked as busy and would not be + unmounted." + + ``O_EVTONLY`` is defined as ``0x8000`` in the OS X header files. + More information here: http://www.mlsite.net/blog/?p=2312 + +Classes +------- +.. autoclass:: KqueueEmitter + :members: + :show-inheritance: + +Collections and Utility Classes +------------------------------- +.. autoclass:: KeventDescriptor + :members: + :show-inheritance: + +.. autoclass:: KeventDescriptorSet + :members: + :show-inheritance: + +.. _Mac OS X File System Performance Guidelines: http://developer.apple.com/library/ios/#documentation/Performance/Conceptual/FileSystem/Articles/TrackingChanges.html#//apple_ref/doc/uid/20001993-CJBJFIDD + +""" + +from __future__ import with_statement +from watchdog.utils import platform + +import threading +import errno +import sys +import stat +import os + +# See the notes for this module in the documentation above ^. +#import select +# if not has_attribute(select, 'kqueue') or sys.version_info < (2, 7, 0): +if sys.version_info < (2, 7, 0): + import select_backport as select +else: + import select + +from pathtools.path import absolute_path + +from watchdog.observers.api import ( + BaseObserver, + EventEmitter, + DEFAULT_OBSERVER_TIMEOUT, + DEFAULT_EMITTER_TIMEOUT +) + +from watchdog.utils.dirsnapshot import DirectorySnapshot + +from watchdog.events import ( + DirMovedEvent, + DirDeletedEvent, + DirCreatedEvent, + DirModifiedEvent, + FileMovedEvent, + FileDeletedEvent, + FileCreatedEvent, + FileModifiedEvent, + EVENT_TYPE_MOVED, + EVENT_TYPE_DELETED, + EVENT_TYPE_CREATED +) + +# Maximum number of events to process. +MAX_EVENTS = 4096 + +# O_EVTONLY value from the header files for OS X only. +O_EVTONLY = 0x8000 + +# Pre-calculated values for the kevent filter, flags, and fflags attributes. +if platform.is_darwin(): + WATCHDOG_OS_OPEN_FLAGS = O_EVTONLY +else: + WATCHDOG_OS_OPEN_FLAGS = os.O_RDONLY | os.O_NONBLOCK +WATCHDOG_KQ_FILTER = select.KQ_FILTER_VNODE +WATCHDOG_KQ_EV_FLAGS = select.KQ_EV_ADD | select.KQ_EV_ENABLE | select.KQ_EV_CLEAR +WATCHDOG_KQ_FFLAGS = ( + select.KQ_NOTE_DELETE | + select.KQ_NOTE_WRITE | + select.KQ_NOTE_EXTEND | + select.KQ_NOTE_ATTRIB | + select.KQ_NOTE_LINK | + select.KQ_NOTE_RENAME | + select.KQ_NOTE_REVOKE +) + +# Flag tests. + + +def is_deleted(kev): + """Determines whether the given kevent represents deletion.""" + return kev.fflags & select.KQ_NOTE_DELETE + + +def is_modified(kev): + """Determines whether the given kevent represents modification.""" + fflags = kev.fflags + return (fflags & select.KQ_NOTE_EXTEND) or (fflags & select.KQ_NOTE_WRITE) + + +def is_attrib_modified(kev): + """Determines whether the given kevent represents attribute modification.""" + return kev.fflags & select.KQ_NOTE_ATTRIB + + +def is_renamed(kev): + """Determines whether the given kevent represents movement.""" + return kev.fflags & select.KQ_NOTE_RENAME + + +class KeventDescriptorSet(object): + + """ + Thread-safe kevent descriptor collection. + """ + + def __init__(self): + # Set of KeventDescriptor + self._descriptors = set() + + # Descriptor for a given path. + self._descriptor_for_path = dict() + + # Descriptor for a given fd. + self._descriptor_for_fd = dict() + + # List of kevent objects. + self._kevents = list() + + self._lock = threading.Lock() + + @property + def kevents(self): + """ + List of kevents monitored. 
+ """ + with self._lock: + return self._kevents + + @property + def paths(self): + """ + List of paths for which kevents have been created. + """ + with self._lock: + return list(self._descriptor_for_path.keys()) + + def get_for_fd(self, fd): + """ + Given a file descriptor, returns the kevent descriptor object + for it. + + :param fd: + OS file descriptor. + :type fd: + ``int`` + :returns: + A :class:`KeventDescriptor` object. + """ + with self._lock: + return self._descriptor_for_fd[fd] + + def get(self, path): + """ + Obtains a :class:`KeventDescriptor` object for the specified path. + + :param path: + Path for which the descriptor will be obtained. + """ + with self._lock: + path = absolute_path(path) + return self._get(path) + + def __contains__(self, path): + """ + Determines whether a :class:`KeventDescriptor has been registered + for the specified path. + + :param path: + Path for which the descriptor will be obtained. + """ + with self._lock: + path = absolute_path(path) + return self._has_path(path) + + def add(self, path, is_directory): + """ + Adds a :class:`KeventDescriptor` to the collection for the given + path. + + :param path: + The path for which a :class:`KeventDescriptor` object will be + added. + :param is_directory: + ``True`` if the path refers to a directory; ``False`` otherwise. + :type is_directory: + ``bool`` + """ + with self._lock: + path = absolute_path(path) + if not self._has_path(path): + self._add_descriptor(KeventDescriptor(path, is_directory)) + + def remove(self, path): + """ + Removes the :class:`KeventDescriptor` object for the given path + if it already exists. + + :param path: + Path for which the :class:`KeventDescriptor` object will be + removed. + """ + with self._lock: + path = absolute_path(path) + if self._has_path(path): + self._remove_descriptor(self._get(path)) + + def clear(self): + """ + Clears the collection and closes all open descriptors. + """ + with self._lock: + for descriptor in self._descriptors: + descriptor.close() + self._descriptors.clear() + self._descriptor_for_fd.clear() + self._descriptor_for_path.clear() + self._kevents = [] + + # Thread-unsafe methods. Locking is provided at a higher level. + def _get(self, path): + """Returns a kevent descriptor for a given path.""" + return self._descriptor_for_path[path] + + def _has_path(self, path): + """Determines whether a :class:`KeventDescriptor` for the specified + path exists already in the collection.""" + return path in self._descriptor_for_path + + def _add_descriptor(self, descriptor): + """ + Adds a descriptor to the collection. + + :param descriptor: + An instance of :class:`KeventDescriptor` to be added. + """ + self._descriptors.add(descriptor) + self._kevents.append(descriptor.kevent) + self._descriptor_for_path[descriptor.path] = descriptor + self._descriptor_for_fd[descriptor.fd] = descriptor + + def _remove_descriptor(self, descriptor): + """ + Removes a descriptor from the collection. + + :param descriptor: + An instance of :class:`KeventDescriptor` to be removed. + """ + self._descriptors.remove(descriptor) + del self._descriptor_for_fd[descriptor.fd] + del self._descriptor_for_path[descriptor.path] + self._kevents.remove(descriptor.kevent) + descriptor.close() + + +class KeventDescriptor(object): + + """ + A kevent descriptor convenience data structure to keep together: + + * kevent + * directory status + * path + * file descriptor + + :param path: + Path string for which a kevent descriptor will be created. 
+ :param is_directory: + ``True`` if the path refers to a directory; ``False`` otherwise. + :type is_directory: + ``bool`` + """ + + def __init__(self, path, is_directory): + self._path = absolute_path(path) + self._is_directory = is_directory + self._fd = os.open(path, WATCHDOG_OS_OPEN_FLAGS) + self._kev = select.kevent(self._fd, + filter=WATCHDOG_KQ_FILTER, + flags=WATCHDOG_KQ_EV_FLAGS, + fflags=WATCHDOG_KQ_FFLAGS) + + @property + def fd(self): + """OS file descriptor for the kevent descriptor.""" + return self._fd + + @property + def path(self): + """The path associated with the kevent descriptor.""" + return self._path + + @property + def kevent(self): + """The kevent object associated with the kevent descriptor.""" + return self._kev + + @property + def is_directory(self): + """Determines whether the kevent descriptor refers to a directory. + + :returns: + ``True`` or ``False`` + """ + return self._is_directory + + def close(self): + """ + Closes the file descriptor associated with a kevent descriptor. + """ + try: + os.close(self.fd) + except OSError: + pass + + @property + def key(self): + return (self.path, self.is_directory) + + def __eq__(self, descriptor): + return self.key == descriptor.key + + def __ne__(self, descriptor): + return self.key != descriptor.key + + def __hash__(self): + return hash(self.key) + + def __repr__(self): + return ""\ + % (self.path, self.is_directory) + + +class KqueueEmitter(EventEmitter): + + """ + kqueue(2)-based event emitter. + + .. ADMONITION:: About ``kqueue(2)`` behavior and this implementation + + ``kqueue(2)`` monitors file system events only for + open descriptors, which means, this emitter does a lot of + book-keeping behind the scenes to keep track of open + descriptors for every entry in the monitored directory tree. + + This also means the number of maximum open file descriptors + on your system must be increased **manually**. + Usually, issuing a call to ``ulimit`` should suffice:: + + ulimit -n 1024 + + Ensure that you pick a number that is larger than the + number of files you expect to be monitored. + + ``kqueue(2)`` does not provide enough information about the + following things: + + * The destination path of a file or directory that is renamed. + * Creation of a file or directory within a directory; in this + case, ``kqueue(2)`` only indicates a modified event on the + parent directory. + + Therefore, this emitter takes a snapshot of the directory + tree when ``kqueue(2)`` detects a change on the file system + to be able to determine the above information. + + :param event_queue: + The event queue to fill with events. + :param watch: + A watch object representing the directory to monitor. + :type watch: + :class:`watchdog.observers.api.ObservedWatch` + :param timeout: + Read events blocking timeout (in seconds). + :type timeout: + ``float`` + """ + + def __init__(self, event_queue, watch, timeout=DEFAULT_EMITTER_TIMEOUT): + EventEmitter.__init__(self, event_queue, watch, timeout) + + self._kq = select.kqueue() + self._lock = threading.RLock() + + # A collection of KeventDescriptor. + self._descriptors = KeventDescriptorSet() + + def walker_callback(path, stat_info, self=self): + self._register_kevent(path, stat.S_ISDIR(stat_info.st_mode)) + + self._snapshot = DirectorySnapshot(watch.path, + watch.is_recursive, + walker_callback) + + def _register_kevent(self, path, is_directory): + """ + Registers a kevent descriptor for the given path. + + :param path: + Path for which a kevent descriptor will be created. 
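# --- Illustrative sketch (BSD/macOS only): the kevent registration that
# KeventDescriptor and KqueueEmitter perform above, reduced to standard
# library calls. The watched path is a hypothetical placeholder.
import os
import select

O_EVTONLY = 0x8000                      # macOS-only open flag, see above

fd = os.open("/tmp/watched-file", O_EVTONLY)
kq = select.kqueue()
kev = select.kevent(
    fd,
    filter=select.KQ_FILTER_VNODE,
    flags=select.KQ_EV_ADD | select.KQ_EV_ENABLE | select.KQ_EV_CLEAR,
    fflags=select.KQ_NOTE_WRITE | select.KQ_NOTE_DELETE | select.KQ_NOTE_RENAME,
)
events = kq.control([kev], 1, 5.0)      # wait up to 5 s for at most 1 event
for ev in events:
    print(hex(ev.fflags))
kq.close()
os.close(fd)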
+ :param is_directory: + ``True`` if the path refers to a directory; ``False`` otherwise. + :type is_directory: + ``bool`` + """ + try: + self._descriptors.add(path, is_directory) + except OSError as e: + if e.errno == errno.ENOENT: + # Probably dealing with a temporary file that was created + # and then quickly deleted before we could open + # a descriptor for it. Therefore, simply queue a sequence + # of created and deleted events for the path. + #path = absolute_path(path) + # if is_directory: + # self.queue_event(DirCreatedEvent(path)) + # self.queue_event(DirDeletedEvent(path)) + # else: + # self.queue_event(FileCreatedEvent(path)) + # self.queue_event(FileDeletedEvent(path)) + + # TODO: We could simply ignore these files. + # Locked files cause the python process to die with + # a bus error when we handle temporary files. + # eg. .git/index.lock when running tig operations. + # I don't fully understand this at the moment. + pass + else: + # All other errors are propagated. + raise + + def _unregister_kevent(self, path): + """ + Convenience function to close the kevent descriptor for a + specified kqueue-monitored path. + + :param path: + Path for which the kevent descriptor will be closed. + """ + self._descriptors.remove(path) + + def queue_event(self, event): + """ + Handles queueing a single event object. + + :param event: + An instance of :class:`watchdog.events.FileSystemEvent` + or a subclass. + """ + # Handles all the book keeping for queued events. + # We do not need to fire moved/deleted events for all subitems in + # a directory tree here, because this function is called by kqueue + # for all those events anyway. + EventEmitter.queue_event(self, event) + if event.event_type == EVENT_TYPE_CREATED: + self._register_kevent(event.src_path, event.is_directory) + elif event.event_type == EVENT_TYPE_MOVED: + self._unregister_kevent(event.src_path) + self._register_kevent(event.dest_path, event.is_directory) + elif event.event_type == EVENT_TYPE_DELETED: + self._unregister_kevent(event.src_path) + + def _queue_dirs_modified(self, + dirs_modified, + ref_snapshot, + new_snapshot): + """ + Queues events for directory modifications by scanning the directory + for changes. + + A scan is a comparison between two snapshots of the same directory + taken at two different times. This also determines whether files + or directories were created, which updated the modified timestamp + for the directory. + """ + if dirs_modified: + for dir_modified in dirs_modified: + self.queue_event(DirModifiedEvent(dir_modified)) + diff_events = new_snapshot - ref_snapshot + for file_created in diff_events.files_created: + self.queue_event(FileCreatedEvent(file_created)) + for directory_created in diff_events.dirs_created: + self.queue_event(DirCreatedEvent(directory_created)) + + def _queue_events_except_renames_and_dir_modifications(self, event_list): + """ + Queues events from the kevent list returned from the call to + :meth:`select.kqueue.control`. + + .. NOTE:: Queues only the deletions, file modifications, + attribute modifications. The other events, namely, + file creation, directory modification, file rename, + directory rename, directory creation, etc. are + determined by comparing directory snapshots. 
+ """ + files_renamed = set() + dirs_renamed = set() + dirs_modified = set() + + for kev in event_list: + descriptor = self._descriptors.get_for_fd(kev.ident) + src_path = descriptor.path + + if is_deleted(kev): + if descriptor.is_directory: + self.queue_event(DirDeletedEvent(src_path)) + else: + self.queue_event(FileDeletedEvent(src_path)) + elif is_attrib_modified(kev): + if descriptor.is_directory: + self.queue_event(DirModifiedEvent(src_path)) + else: + self.queue_event(FileModifiedEvent(src_path)) + elif is_modified(kev): + if descriptor.is_directory: + # When a directory is modified, it may be due to + # sub-file/directory renames or new file/directory + # creation. We determine all this by comparing + # snapshots later. + dirs_modified.add(src_path) + else: + self.queue_event(FileModifiedEvent(src_path)) + elif is_renamed(kev): + # Kqueue does not specify the destination names for renames + # to, so we have to process these after taking a snapshot + # of the directory. + if descriptor.is_directory: + dirs_renamed.add(src_path) + else: + files_renamed.add(src_path) + return files_renamed, dirs_renamed, dirs_modified + + def _queue_renamed(self, + src_path, + is_directory, + ref_snapshot, + new_snapshot): + """ + Compares information from two directory snapshots (one taken before + the rename operation and another taken right after) to determine the + destination path of the file system object renamed, and adds + appropriate events to the event queue. + """ + try: + ref_stat_info = ref_snapshot.stat_info(src_path) + except KeyError: + # Probably caught a temporary file/directory that was renamed + # and deleted. Fires a sequence of created and deleted events + # for the path. + if is_directory: + self.queue_event(DirCreatedEvent(src_path)) + self.queue_event(DirDeletedEvent(src_path)) + else: + self.queue_event(FileCreatedEvent(src_path)) + self.queue_event(FileDeletedEvent(src_path)) + # We don't process any further and bail out assuming + # the event represents deletion/creation instead of movement. + return + + try: + dest_path = absolute_path( + new_snapshot.path_for_inode(ref_stat_info.st_ino)) + if is_directory: + event = DirMovedEvent(src_path, dest_path) + # TODO: Do we need to fire moved events for the items + # inside the directory tree? Does kqueue does this + # all by itself? Check this and then enable this code + # only if it doesn't already. + # A: It doesn't. So I've enabled this block. + if self.watch.is_recursive: + for sub_event in event.sub_moved_events(): + self.queue_event(sub_event) + self.queue_event(event) + else: + self.queue_event(FileMovedEvent(src_path, dest_path)) + except KeyError: + # If the new snapshot does not have an inode for the + # old path, we haven't found the new name. Therefore, + # we mark it as deleted and remove unregister the path. + if is_directory: + self.queue_event(DirDeletedEvent(src_path)) + else: + self.queue_event(FileDeletedEvent(src_path)) + + def _read_events(self, timeout=None): + """ + Reads events from a call to the blocking + :meth:`select.kqueue.control()` method. + + :param timeout: + Blocking timeout for reading events. + :type timeout: + ``float`` (seconds) + """ + return self._kq.control(self._descriptors.kevents, + MAX_EVENTS, + timeout) + + def queue_events(self, timeout): + """ + Queues events by reading them from a call to the blocking + :meth:`select.kqueue.control()` method. + + :param timeout: + Blocking timeout for reading events. 
+ :type timeout: + ``float`` (seconds) + """ + with self._lock: + try: + event_list = self._read_events(timeout) + files_renamed, dirs_renamed, dirs_modified = ( + self._queue_events_except_renames_and_dir_modifications(event_list)) + + # Take a fresh snapshot of the directory and update the + # saved snapshot. + new_snapshot = DirectorySnapshot(self.watch.path, + self.watch.is_recursive) + ref_snapshot = self._snapshot + self._snapshot = new_snapshot + + if files_renamed or dirs_renamed or dirs_modified: + for src_path in files_renamed: + self._queue_renamed(src_path, + False, + ref_snapshot, + new_snapshot) + for src_path in dirs_renamed: + self._queue_renamed(src_path, + True, + ref_snapshot, + new_snapshot) + self._queue_dirs_modified(dirs_modified, + ref_snapshot, + new_snapshot) + except OSError as e: + if e.errno == errno.EBADF: + # logging.debug(e) + pass + else: + raise + + def on_thread_stop(self): + # Clean up. + with self._lock: + self._descriptors.clear() + self._kq.close() + + +class KqueueObserver(BaseObserver): + + """ + Observer thread that schedules watching directories and dispatches + calls to event handlers. + """ + + def __init__(self, timeout=DEFAULT_OBSERVER_TIMEOUT): + BaseObserver.__init__(self, emitter_class=KqueueEmitter, timeout=timeout) diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/observers/polling.py b/flask/venv/lib/python3.6/site-packages/watchdog/observers/polling.py new file mode 100644 index 0000000..4ad502c --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/watchdog/observers/polling.py @@ -0,0 +1,145 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2011 Yesudeep Mangalapilly +# Copyright 2012 Google, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +""" +:module: watchdog.observers.polling +:synopsis: Polling emitter implementation. +:author: yesudeep@google.com (Yesudeep Mangalapilly) + +Classes +------- +.. autoclass:: PollingObserver + :members: + :show-inheritance: + +.. autoclass:: PollingObserverVFS + :members: + :show-inheritance: + :special-members: +""" + +from __future__ import with_statement +import os +import threading +from functools import partial +from watchdog.utils import stat as default_stat +from watchdog.utils.dirsnapshot import DirectorySnapshot, DirectorySnapshotDiff +from watchdog.observers.api import ( + EventEmitter, + BaseObserver, + DEFAULT_OBSERVER_TIMEOUT, + DEFAULT_EMITTER_TIMEOUT +) + +from watchdog.events import ( + DirMovedEvent, + DirDeletedEvent, + DirCreatedEvent, + DirModifiedEvent, + FileMovedEvent, + FileDeletedEvent, + FileCreatedEvent, + FileModifiedEvent +) + + +class PollingEmitter(EventEmitter): + """ + Platform-independent emitter that polls a directory to detect file + system changes. 
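+
+    Each call to :meth:`queue_events` waits up to ``timeout`` seconds (the
+    polling interval), takes a fresh :class:`DirectorySnapshot`, and queues the
+    events described by its diff against the previous snapshot.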
+ """ + + def __init__(self, event_queue, watch, timeout=DEFAULT_EMITTER_TIMEOUT, + stat=default_stat, listdir=os.listdir): + EventEmitter.__init__(self, event_queue, watch, timeout) + self._snapshot = None + self._lock = threading.Lock() + self._take_snapshot = lambda: DirectorySnapshot( + self.watch.path, self.watch.is_recursive, stat=stat, listdir=listdir) + + def on_thread_start(self): + self._snapshot = self._take_snapshot() + + def queue_events(self, timeout): + # We don't want to hit the disk continuously. + # timeout behaves like an interval for polling emitters. + if self.stopped_event.wait(timeout): + return + + with self._lock: + if not self.should_keep_running(): + return + + # Get event diff between fresh snapshot and previous snapshot. + # Update snapshot. + try: + new_snapshot = self._take_snapshot() + except OSError: + self.queue_event(DirDeletedEvent(self.watch.path)) + self.stop() + return + + events = DirectorySnapshotDiff(self._snapshot, new_snapshot) + self._snapshot = new_snapshot + + # Files. + for src_path in events.files_deleted: + self.queue_event(FileDeletedEvent(src_path)) + for src_path in events.files_modified: + self.queue_event(FileModifiedEvent(src_path)) + for src_path in events.files_created: + self.queue_event(FileCreatedEvent(src_path)) + for src_path, dest_path in events.files_moved: + self.queue_event(FileMovedEvent(src_path, dest_path)) + + # Directories. + for src_path in events.dirs_deleted: + self.queue_event(DirDeletedEvent(src_path)) + for src_path in events.dirs_modified: + self.queue_event(DirModifiedEvent(src_path)) + for src_path in events.dirs_created: + self.queue_event(DirCreatedEvent(src_path)) + for src_path, dest_path in events.dirs_moved: + self.queue_event(DirMovedEvent(src_path, dest_path)) + + +class PollingObserver(BaseObserver): + """ + Platform-independent observer that polls a directory to detect file + system changes. + """ + + def __init__(self, timeout=DEFAULT_OBSERVER_TIMEOUT): + BaseObserver.__init__(self, emitter_class=PollingEmitter, timeout=timeout) + + +class PollingObserverVFS(BaseObserver): + """ + File system independent observer that polls a directory to detect changes. + """ + + def __init__(self, stat, listdir, polling_interval=1): + """ + :param stat: stat function. See ``os.stat`` for details. + :param listdir: listdir function. See ``os.listdir`` for details. + :type polling_interval: float + :param polling_interval: interval in seconds between polling the file system. + """ + emitter_cls = partial(PollingEmitter, stat=stat, listdir=listdir) + BaseObserver.__init__(self, emitter_class=emitter_cls, timeout=polling_interval) diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/observers/read_directory_changes.py b/flask/venv/lib/python3.6/site-packages/watchdog/observers/read_directory_changes.py new file mode 100644 index 0000000..bf2044e --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/watchdog/observers/read_directory_changes.py @@ -0,0 +1,133 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2011 Yesudeep Mangalapilly +# Copyright 2012 Google, Inc. +# Copyright 2014 Thomas Amland +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import with_statement + +import threading +import os.path +import time + +from watchdog.events import ( + DirCreatedEvent, + DirMovedEvent, + DirModifiedEvent, + FileCreatedEvent, + FileDeletedEvent, + FileMovedEvent, + FileModifiedEvent, + generate_sub_moved_events, + generate_sub_created_events, +) + +from watchdog.observers.api import ( + EventEmitter, + BaseObserver, + DEFAULT_OBSERVER_TIMEOUT, + DEFAULT_EMITTER_TIMEOUT +) + +from watchdog.observers.winapi import ( + read_events, + get_directory_handle, + close_directory_handle, +) + + +# HACK: +WATCHDOG_TRAVERSE_MOVED_DIR_DELAY = 1 # seconds + + +class WindowsApiEmitter(EventEmitter): + """ + Windows API-based emitter that uses ReadDirectoryChangesW + to detect file system changes for a watch. + """ + + def __init__(self, event_queue, watch, timeout=DEFAULT_EMITTER_TIMEOUT): + EventEmitter.__init__(self, event_queue, watch, timeout) + self._lock = threading.Lock() + self._handle = None + + def on_thread_start(self): + self._handle = get_directory_handle(self.watch.path) + + def on_thread_stop(self): + if self._handle: + close_directory_handle(self._handle) + + def queue_events(self, timeout): + winapi_events = read_events(self._handle, self.watch.is_recursive) + with self._lock: + last_renamed_src_path = "" + for winapi_event in winapi_events: + src_path = os.path.join(self.watch.path, winapi_event.src_path) + + if winapi_event.is_renamed_old: + last_renamed_src_path = src_path + elif winapi_event.is_renamed_new: + dest_path = src_path + src_path = last_renamed_src_path + if os.path.isdir(dest_path): + event = DirMovedEvent(src_path, dest_path) + if self.watch.is_recursive: + # HACK: We introduce a forced delay before + # traversing the moved directory. This will read + # only file movement that finishes within this + # delay time. + time.sleep(WATCHDOG_TRAVERSE_MOVED_DIR_DELAY) + # The following block of code may not + # obtain moved events for the entire tree if + # the I/O is not completed within the above + # delay time. So, it's not guaranteed to work. + # TODO: Come up with a better solution, possibly + # a way to wait for I/O to complete before + # queuing events. 
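+                            # generate_sub_moved_events() appears to walk the
+                            # already-moved tree at dest_path on disk and emit a
+                            # moved event for every entry beneath it, which is
+                            # why the delay above tries to let the move finish
+                            # before the traversal starts.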
+ for sub_moved_event in generate_sub_moved_events(src_path, dest_path): + self.queue_event(sub_moved_event) + self.queue_event(event) + else: + self.queue_event(FileMovedEvent(src_path, dest_path)) + elif winapi_event.is_modified: + cls = DirModifiedEvent if os.path.isdir(src_path) else FileModifiedEvent + self.queue_event(cls(src_path)) + elif winapi_event.is_added: + isdir = os.path.isdir(src_path) + cls = DirCreatedEvent if isdir else FileCreatedEvent + self.queue_event(cls(src_path)) + if isdir: + # If a directory is moved from outside the watched folder to inside it + # we only get a created directory event out of it, not any events for its children + # so use the same hack as for file moves to get the child events + time.sleep(WATCHDOG_TRAVERSE_MOVED_DIR_DELAY) + sub_events = generate_sub_created_events(src_path) + for sub_created_event in sub_events: + self.queue_event(sub_created_event) + elif winapi_event.is_removed: + self.queue_event(FileDeletedEvent(src_path)) + + +class WindowsApiObserver(BaseObserver): + """ + Observer thread that schedules watching directories and dispatches + calls to event handlers. + """ + + def __init__(self, timeout=DEFAULT_OBSERVER_TIMEOUT): + BaseObserver.__init__(self, emitter_class=WindowsApiEmitter, + timeout=timeout) diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/observers/winapi.py b/flask/venv/lib/python3.6/site-packages/watchdog/observers/winapi.py new file mode 100644 index 0000000..47e9fe8 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/watchdog/observers/winapi.py @@ -0,0 +1,348 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# winapi.py: Windows API-Python interface (removes dependency on pywin32) +# +# Copyright (C) 2007 Thomas Heller +# Copyright (C) 2010 Will McGugan +# Copyright (C) 2010 Ryan Kelly +# Copyright (C) 2010 Yesudeep Mangalapilly +# Copyright (C) 2014 Thomas Amland +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and / or other materials provided with the distribution. +# * Neither the name of the organization nor the names of its contributors may +# be used to endorse or promote products derived from this software without +# specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# +# Portions of this code were taken from pyfilesystem, which uses the above +# new BSD license. 
+ +from __future__ import with_statement + +import ctypes.wintypes +from functools import reduce + +try: + LPVOID = ctypes.wintypes.LPVOID +except AttributeError: + # LPVOID wasn't defined in Py2.5, guess it was introduced in Py2.6 + LPVOID = ctypes.c_void_p + +# Invalid handle value. +INVALID_HANDLE_VALUE = ctypes.c_void_p(-1).value + +# File notification contants. +FILE_NOTIFY_CHANGE_FILE_NAME = 0x01 +FILE_NOTIFY_CHANGE_DIR_NAME = 0x02 +FILE_NOTIFY_CHANGE_ATTRIBUTES = 0x04 +FILE_NOTIFY_CHANGE_SIZE = 0x08 +FILE_NOTIFY_CHANGE_LAST_WRITE = 0x010 +FILE_NOTIFY_CHANGE_LAST_ACCESS = 0x020 +FILE_NOTIFY_CHANGE_CREATION = 0x040 +FILE_NOTIFY_CHANGE_SECURITY = 0x0100 + +FILE_FLAG_BACKUP_SEMANTICS = 0x02000000 +FILE_FLAG_OVERLAPPED = 0x40000000 +FILE_LIST_DIRECTORY = 1 +FILE_SHARE_READ = 0x01 +FILE_SHARE_WRITE = 0x02 +FILE_SHARE_DELETE = 0x04 +OPEN_EXISTING = 3 + +# File action constants. +FILE_ACTION_CREATED = 1 +FILE_ACTION_DELETED = 2 +FILE_ACTION_MODIFIED = 3 +FILE_ACTION_RENAMED_OLD_NAME = 4 +FILE_ACTION_RENAMED_NEW_NAME = 5 +FILE_ACTION_OVERFLOW = 0xFFFF + +# Aliases +FILE_ACTION_ADDED = FILE_ACTION_CREATED +FILE_ACTION_REMOVED = FILE_ACTION_DELETED + +THREAD_TERMINATE = 0x0001 + +# IO waiting constants. +WAIT_ABANDONED = 0x00000080 +WAIT_IO_COMPLETION = 0x000000C0 +WAIT_OBJECT_0 = 0x00000000 +WAIT_TIMEOUT = 0x00000102 + +# Error codes +ERROR_OPERATION_ABORTED = 995 + + +class OVERLAPPED(ctypes.Structure): + _fields_ = [('Internal', LPVOID), + ('InternalHigh', LPVOID), + ('Offset', ctypes.wintypes.DWORD), + ('OffsetHigh', ctypes.wintypes.DWORD), + ('Pointer', LPVOID), + ('hEvent', ctypes.wintypes.HANDLE), + ] + + +def _errcheck_bool(value, func, args): + if not value: + raise ctypes.WinError() + return args + + +def _errcheck_handle(value, func, args): + if not value: + raise ctypes.WinError() + if value == INVALID_HANDLE_VALUE: + raise ctypes.WinError() + return args + + +def _errcheck_dword(value, func, args): + if value == 0xFFFFFFFF: + raise ctypes.WinError() + return args + + +ReadDirectoryChangesW = ctypes.windll.kernel32.ReadDirectoryChangesW +ReadDirectoryChangesW.restype = ctypes.wintypes.BOOL +ReadDirectoryChangesW.errcheck = _errcheck_bool +ReadDirectoryChangesW.argtypes = ( + ctypes.wintypes.HANDLE, # hDirectory + LPVOID, # lpBuffer + ctypes.wintypes.DWORD, # nBufferLength + ctypes.wintypes.BOOL, # bWatchSubtree + ctypes.wintypes.DWORD, # dwNotifyFilter + ctypes.POINTER(ctypes.wintypes.DWORD), # lpBytesReturned + ctypes.POINTER(OVERLAPPED), # lpOverlapped + LPVOID # FileIOCompletionRoutine # lpCompletionRoutine +) + +CreateFileW = ctypes.windll.kernel32.CreateFileW +CreateFileW.restype = ctypes.wintypes.HANDLE +CreateFileW.errcheck = _errcheck_handle +CreateFileW.argtypes = ( + ctypes.wintypes.LPCWSTR, # lpFileName + ctypes.wintypes.DWORD, # dwDesiredAccess + ctypes.wintypes.DWORD, # dwShareMode + LPVOID, # lpSecurityAttributes + ctypes.wintypes.DWORD, # dwCreationDisposition + ctypes.wintypes.DWORD, # dwFlagsAndAttributes + ctypes.wintypes.HANDLE # hTemplateFile +) + +CloseHandle = ctypes.windll.kernel32.CloseHandle +CloseHandle.restype = ctypes.wintypes.BOOL +CloseHandle.argtypes = ( + ctypes.wintypes.HANDLE, # hObject +) + +CancelIoEx = ctypes.windll.kernel32.CancelIoEx +CancelIoEx.restype = ctypes.wintypes.BOOL +CancelIoEx.errcheck = _errcheck_bool +CancelIoEx.argtypes = ( + ctypes.wintypes.HANDLE, # hObject + ctypes.POINTER(OVERLAPPED) # lpOverlapped +) + +CreateEvent = ctypes.windll.kernel32.CreateEventW +CreateEvent.restype = ctypes.wintypes.HANDLE +CreateEvent.errcheck = 
_errcheck_handle +CreateEvent.argtypes = ( + LPVOID, # lpEventAttributes + ctypes.wintypes.BOOL, # bManualReset + ctypes.wintypes.BOOL, # bInitialState + ctypes.wintypes.LPCWSTR, # lpName +) + +SetEvent = ctypes.windll.kernel32.SetEvent +SetEvent.restype = ctypes.wintypes.BOOL +SetEvent.errcheck = _errcheck_bool +SetEvent.argtypes = ( + ctypes.wintypes.HANDLE, # hEvent +) + +WaitForSingleObjectEx = ctypes.windll.kernel32.WaitForSingleObjectEx +WaitForSingleObjectEx.restype = ctypes.wintypes.DWORD +WaitForSingleObjectEx.errcheck = _errcheck_dword +WaitForSingleObjectEx.argtypes = ( + ctypes.wintypes.HANDLE, # hObject + ctypes.wintypes.DWORD, # dwMilliseconds + ctypes.wintypes.BOOL, # bAlertable +) + +CreateIoCompletionPort = ctypes.windll.kernel32.CreateIoCompletionPort +CreateIoCompletionPort.restype = ctypes.wintypes.HANDLE +CreateIoCompletionPort.errcheck = _errcheck_handle +CreateIoCompletionPort.argtypes = ( + ctypes.wintypes.HANDLE, # FileHandle + ctypes.wintypes.HANDLE, # ExistingCompletionPort + LPVOID, # CompletionKey + ctypes.wintypes.DWORD, # NumberOfConcurrentThreads +) + +GetQueuedCompletionStatus = ctypes.windll.kernel32.GetQueuedCompletionStatus +GetQueuedCompletionStatus.restype = ctypes.wintypes.BOOL +GetQueuedCompletionStatus.errcheck = _errcheck_bool +GetQueuedCompletionStatus.argtypes = ( + ctypes.wintypes.HANDLE, # CompletionPort + LPVOID, # lpNumberOfBytesTransferred + LPVOID, # lpCompletionKey + ctypes.POINTER(OVERLAPPED), # lpOverlapped + ctypes.wintypes.DWORD, # dwMilliseconds +) + +PostQueuedCompletionStatus = ctypes.windll.kernel32.PostQueuedCompletionStatus +PostQueuedCompletionStatus.restype = ctypes.wintypes.BOOL +PostQueuedCompletionStatus.errcheck = _errcheck_bool +PostQueuedCompletionStatus.argtypes = ( + ctypes.wintypes.HANDLE, # CompletionPort + ctypes.wintypes.DWORD, # lpNumberOfBytesTransferred + ctypes.wintypes.DWORD, # lpCompletionKey + ctypes.POINTER(OVERLAPPED), # lpOverlapped +) + + +class FILE_NOTIFY_INFORMATION(ctypes.Structure): + _fields_ = [("NextEntryOffset", ctypes.wintypes.DWORD), + ("Action", ctypes.wintypes.DWORD), + ("FileNameLength", ctypes.wintypes.DWORD), + #("FileName", (ctypes.wintypes.WCHAR * 1))] + ("FileName", (ctypes.c_char * 1))] + +LPFNI = ctypes.POINTER(FILE_NOTIFY_INFORMATION) + + +# We don't need to recalculate these flags every time a call is made to +# the win32 API functions. +WATCHDOG_FILE_FLAGS = FILE_FLAG_BACKUP_SEMANTICS +WATCHDOG_FILE_SHARE_FLAGS = reduce( + lambda x, y: x | y, [ + FILE_SHARE_READ, + FILE_SHARE_WRITE, + FILE_SHARE_DELETE, + ]) +WATCHDOG_FILE_NOTIFY_FLAGS = reduce( + lambda x, y: x | y, [ + FILE_NOTIFY_CHANGE_FILE_NAME, + FILE_NOTIFY_CHANGE_DIR_NAME, + FILE_NOTIFY_CHANGE_ATTRIBUTES, + FILE_NOTIFY_CHANGE_SIZE, + FILE_NOTIFY_CHANGE_LAST_WRITE, + FILE_NOTIFY_CHANGE_SECURITY, + FILE_NOTIFY_CHANGE_LAST_ACCESS, + FILE_NOTIFY_CHANGE_CREATION, + ]) + +BUFFER_SIZE = 2048 + + +def _parse_event_buffer(readBuffer, nBytes): + results = [] + while nBytes > 0: + fni = ctypes.cast(readBuffer, LPFNI)[0] + ptr = ctypes.addressof(fni) + FILE_NOTIFY_INFORMATION.FileName.offset + #filename = ctypes.wstring_at(ptr, fni.FileNameLength) + filename = ctypes.string_at(ptr, fni.FileNameLength) + results.append((fni.Action, filename.decode('utf-16'))) + numToSkip = fni.NextEntryOffset + if numToSkip <= 0: + break + readBuffer = readBuffer[numToSkip:] + nBytes -= numToSkip # numToSkip is long. nBytes should be long too. 
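+
+    # The buffer holds packed FILE_NOTIFY_INFORMATION records chained by
+    # NextEntryOffset (0 marks the last record, hence the break above). FileName
+    # is declared as a one-byte char array, so the raw bytes are read with
+    # ctypes.string_at() and decoded as UTF-16 to recover the wide-character name.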
+ return results + + +def get_directory_handle(path): + """Returns a Windows handle to the specified directory path.""" + return CreateFileW(path, FILE_LIST_DIRECTORY, WATCHDOG_FILE_SHARE_FLAGS, + None, OPEN_EXISTING, WATCHDOG_FILE_FLAGS, None) + + +def close_directory_handle(handle): + try: + CancelIoEx(handle, None) # force ReadDirectoryChangesW to return + CloseHandle(handle) # close directory handle + except WindowsError: + try: + CloseHandle(handle) # close directory handle + except: + return + + +def read_directory_changes(handle, recursive): + """Read changes to the directory using the specified directory handle. + + http://timgolden.me.uk/pywin32-docs/win32file__ReadDirectoryChangesW_meth.html + """ + event_buffer = ctypes.create_string_buffer(BUFFER_SIZE) + nbytes = ctypes.wintypes.DWORD() + try: + ReadDirectoryChangesW(handle, ctypes.byref(event_buffer), + len(event_buffer), recursive, + WATCHDOG_FILE_NOTIFY_FLAGS, + ctypes.byref(nbytes), None, None) + except WindowsError as e: + if e.winerror == ERROR_OPERATION_ABORTED: + return [], 0 + raise e + + # Python 2/3 compat + try: + int_class = long + except NameError: + int_class = int + return event_buffer.raw, int_class(nbytes.value) + + +class WinAPINativeEvent(object): + def __init__(self, action, src_path): + self.action = action + self.src_path = src_path + + @property + def is_added(self): + return self.action == FILE_ACTION_CREATED + + @property + def is_removed(self): + return self.action == FILE_ACTION_REMOVED + + @property + def is_modified(self): + return self.action == FILE_ACTION_MODIFIED + + @property + def is_renamed_old(self): + return self.action == FILE_ACTION_RENAMED_OLD_NAME + + @property + def is_renamed_new(self): + return self.action == FILE_ACTION_RENAMED_NEW_NAME + + def __repr__(self): + return ("" % (self.action, self.src_path)) + + +def read_events(handle, recursive): + buf, nbytes = read_directory_changes(handle, recursive) + events = _parse_event_buffer(buf, nbytes) + return [WinAPINativeEvent(action, path) for action, path in events] diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/tricks/__init__.py b/flask/venv/lib/python3.6/site-packages/watchdog/tricks/__init__.py new file mode 100644 index 0000000..7e1c9fe --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/watchdog/tricks/__init__.py @@ -0,0 +1,174 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2011 Yesudeep Mangalapilly +# Copyright 2012 Google, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +import os +import signal +import subprocess +import time + +from watchdog.utils import echo, has_attribute +from watchdog.events import PatternMatchingEventHandler + + +class Trick(PatternMatchingEventHandler): + + """Your tricks should subclass this class.""" + + @classmethod + def generate_yaml(cls): + context = dict(module_name=cls.__module__, + klass_name=cls.__name__) + template_yaml = """- %(module_name)s.%(klass_name)s: + args: + - argument1 + - argument2 + kwargs: + patterns: + - "*.py" + - "*.js" + ignore_patterns: + - "version.py" + ignore_directories: false +""" + return template_yaml % context + + +class LoggerTrick(Trick): + + """A simple trick that does only logs events.""" + + def on_any_event(self, event): + pass + + @echo.echo + def on_modified(self, event): + pass + + @echo.echo + def on_deleted(self, event): + pass + + @echo.echo + def on_created(self, event): + pass + + @echo.echo + def on_moved(self, event): + pass + + +class ShellCommandTrick(Trick): + + """Executes shell commands in response to matched events.""" + + def __init__(self, shell_command=None, patterns=None, ignore_patterns=None, + ignore_directories=False, wait_for_process=False, + drop_during_process=False): + super(ShellCommandTrick, self).__init__(patterns, ignore_patterns, + ignore_directories) + self.shell_command = shell_command + self.wait_for_process = wait_for_process + self.drop_during_process = drop_during_process + self.process = None + + def on_any_event(self, event): + from string import Template + + if self.drop_during_process and self.process and self.process.poll() is None: + return + + if event.is_directory: + object_type = 'directory' + else: + object_type = 'file' + + context = { + 'watch_src_path': event.src_path, + 'watch_dest_path': '', + 'watch_event_type': event.event_type, + 'watch_object': object_type, + } + + if self.shell_command is None: + if has_attribute(event, 'dest_path'): + context.update({'dest_path': event.dest_path}) + command = 'echo "${watch_event_type} ${watch_object} from ${watch_src_path} to ${watch_dest_path}"' + else: + command = 'echo "${watch_event_type} ${watch_object} ${watch_src_path}"' + else: + if has_attribute(event, 'dest_path'): + context.update({'watch_dest_path': event.dest_path}) + command = self.shell_command + + command = Template(command).safe_substitute(**context) + self.process = subprocess.Popen(command, shell=True) + if self.wait_for_process: + self.process.wait() + + +class AutoRestartTrick(Trick): + + """Starts a long-running subprocess and restarts it on matched events. + + The command parameter is a list of command arguments, such as + ['bin/myserver', '-c', 'etc/myconfig.ini']. + + Call start() after creating the Trick. Call stop() when stopping + the process. 
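+
+    A minimal sketch of typical use (the command and patterns below are only
+    illustrative)::
+
+        trick = AutoRestartTrick(['bin/myserver', '-c', 'etc/myconfig.ini'],
+                                 patterns=['*.ini'])
+        trick.start()
+        # ... schedule `trick` on an observer so matched events trigger a restart ...
+        trick.stop()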
+ """ + + def __init__(self, command, patterns=None, ignore_patterns=None, + ignore_directories=False, stop_signal=signal.SIGINT, + kill_after=10): + super(AutoRestartTrick, self).__init__( + patterns, ignore_patterns, ignore_directories) + self.command = command + self.stop_signal = stop_signal + self.kill_after = kill_after + self.process = None + + def start(self): + self.process = subprocess.Popen(self.command, preexec_fn=os.setsid) + + def stop(self): + if self.process is None: + return + try: + os.killpg(os.getpgid(self.process.pid), self.stop_signal) + except OSError: + # Process is already gone + pass + else: + kill_time = time.time() + self.kill_after + while time.time() < kill_time: + if self.process.poll() is not None: + break + time.sleep(0.25) + else: + try: + os.killpg(os.getpgid(self.process.pid), 9) + except OSError: + # Process is already gone + pass + self.process = None + + @echo.echo + def on_any_event(self, event): + self.stop() + self.start() diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/tricks/__pycache__/__init__.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/watchdog/tricks/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..b8c1cb3 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/watchdog/tricks/__pycache__/__init__.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/utils/__init__.py b/flask/venv/lib/python3.6/site-packages/watchdog/utils/__init__.py new file mode 100644 index 0000000..dfe067d --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/watchdog/utils/__init__.py @@ -0,0 +1,157 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2011 Yesudeep Mangalapilly +# Copyright 2012 Google, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +""" +:module: watchdog.utils +:synopsis: Utility classes and functions. +:author: yesudeep@google.com (Yesudeep Mangalapilly) + +Classes +------- +.. autoclass:: BaseThread + :members: + :show-inheritance: + :inherited-members: + +""" +import os +import sys +import threading +from watchdog.utils import platform +from watchdog.utils.compat import Event + + +if sys.version_info[0] == 2 and platform.is_windows(): + # st_ino is not implemented in os.stat on this platform + import win32stat + stat = win32stat.stat +else: + stat = os.stat + + +def has_attribute(ob, attribute): + """ + :func:`hasattr` swallows exceptions. :func:`has_attribute` tests a Python object for the + presence of an attribute. + + :param ob: + object to inspect + :param attribute: + ``str`` for the name of the attribute. + """ + return getattr(ob, attribute, None) is not None + + +class UnsupportedLibc(Exception): + pass + + +class BaseThread(threading.Thread): + """ Convenience class for creating stoppable threads. 
""" + + def __init__(self): + threading.Thread.__init__(self) + if has_attribute(self, 'daemon'): + self.daemon = True + else: + self.setDaemon(True) + self._stopped_event = Event() + + if not has_attribute(self._stopped_event, 'is_set'): + self._stopped_event.is_set = self._stopped_event.isSet + + @property + def stopped_event(self): + return self._stopped_event + + def should_keep_running(self): + """Determines whether the thread should continue running.""" + return not self._stopped_event.is_set() + + def on_thread_stop(self): + """Override this method instead of :meth:`stop()`. + :meth:`stop()` calls this method. + + This method is called immediately after the thread is signaled to stop. + """ + pass + + def stop(self): + """Signals the thread to stop.""" + self._stopped_event.set() + self.on_thread_stop() + + def on_thread_start(self): + """Override this method instead of :meth:`start()`. :meth:`start()` + calls this method. + + This method is called right before this thread is started and this + object’s run() method is invoked. + """ + pass + + def start(self): + self.on_thread_start() + threading.Thread.start(self) + + +def load_module(module_name): + """Imports a module given its name and returns a handle to it.""" + try: + __import__(module_name) + except ImportError: + raise ImportError('No module named %s' % module_name) + return sys.modules[module_name] + + +def load_class(dotted_path): + """Loads and returns a class definition provided a dotted path + specification the last part of the dotted path is the class name + and there is at least one module name preceding the class name. + + Notes: + You will need to ensure that the module you are trying to load + exists in the Python path. + + Examples: + - module.name.ClassName # Provided module.name is in the Python path. + - module.ClassName # Provided module is in the Python path. + + What won't work: + - ClassName + - modle.name.ClassName # Typo in module name. + - module.name.ClasNam # Typo in classname. 
+ """ + dotted_path_split = dotted_path.split('.') + if len(dotted_path_split) > 1: + klass_name = dotted_path_split[-1] + module_name = '.'.join(dotted_path_split[:-1]) + + module = load_module(module_name) + if has_attribute(module, klass_name): + klass = getattr(module, klass_name) + return klass + # Finally create and return an instance of the class + # return klass(*args, **kwargs) + else: + raise AttributeError('Module %s does not have class attribute %s' % ( + module_name, klass_name)) + else: + raise ValueError( + 'Dotted module path %s must contain a module name and a classname' % dotted_path) diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/utils/__pycache__/__init__.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/watchdog/utils/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..1a19f25 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/watchdog/utils/__pycache__/__init__.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/utils/__pycache__/bricks.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/watchdog/utils/__pycache__/bricks.cpython-36.pyc new file mode 100644 index 0000000..f1dc334 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/watchdog/utils/__pycache__/bricks.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/utils/__pycache__/compat.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/watchdog/utils/__pycache__/compat.cpython-36.pyc new file mode 100644 index 0000000..af568b6 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/watchdog/utils/__pycache__/compat.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/utils/__pycache__/decorators.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/watchdog/utils/__pycache__/decorators.cpython-36.pyc new file mode 100644 index 0000000..020babd Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/watchdog/utils/__pycache__/decorators.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/utils/__pycache__/delayed_queue.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/watchdog/utils/__pycache__/delayed_queue.cpython-36.pyc new file mode 100644 index 0000000..97d09fa Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/watchdog/utils/__pycache__/delayed_queue.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/utils/__pycache__/dirsnapshot.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/watchdog/utils/__pycache__/dirsnapshot.cpython-36.pyc new file mode 100644 index 0000000..2dab3ff Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/watchdog/utils/__pycache__/dirsnapshot.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/utils/__pycache__/echo.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/watchdog/utils/__pycache__/echo.cpython-36.pyc new file mode 100644 index 0000000..89d7701 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/watchdog/utils/__pycache__/echo.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/utils/__pycache__/event_backport.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/watchdog/utils/__pycache__/event_backport.cpython-36.pyc new file mode 100644 index 0000000..89eb610 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/watchdog/utils/__pycache__/event_backport.cpython-36.pyc differ 
diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/utils/__pycache__/importlib2.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/watchdog/utils/__pycache__/importlib2.cpython-36.pyc new file mode 100644 index 0000000..dc6d4a9 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/watchdog/utils/__pycache__/importlib2.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/utils/__pycache__/platform.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/watchdog/utils/__pycache__/platform.cpython-36.pyc new file mode 100644 index 0000000..7746a62 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/watchdog/utils/__pycache__/platform.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/utils/__pycache__/unicode_paths.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/watchdog/utils/__pycache__/unicode_paths.cpython-36.pyc new file mode 100644 index 0000000..b2a77cc Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/watchdog/utils/__pycache__/unicode_paths.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/utils/__pycache__/win32stat.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/watchdog/utils/__pycache__/win32stat.cpython-36.pyc new file mode 100644 index 0000000..00dbeae Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/watchdog/utils/__pycache__/win32stat.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/utils/bricks.py b/flask/venv/lib/python3.6/site-packages/watchdog/utils/bricks.py new file mode 100644 index 0000000..b6d6516 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/watchdog/utils/bricks.py @@ -0,0 +1,249 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2011 Yesudeep Mangalapilly +# Copyright 2012 Google, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +""" +Utility collections or "bricks". + +:module: watchdog.utils.bricks +:author: yesudeep@google.com (Yesudeep Mangalapilly) +:author: lalinsky@gmail.com (Lukáš Lalinský) +:author: python@rcn.com (Raymond Hettinger) + +Classes +======= +.. autoclass:: OrderedSetQueue + :members: + :show-inheritance: + :inherited-members: + +.. autoclass:: OrderedSet + +""" + +import sys +import collections +from .compat import queue + +class SkipRepeatsQueue(queue.Queue): + + """Thread-safe implementation of an special queue where a + put of the last-item put'd will be dropped. + + The implementation leverages locking already implemented in the base class + redefining only the primitives. + + Queued items must be immutable and hashable so that they can be used + as dictionary keys. You must implement **only read-only properties** and + the :meth:`Item.__hash__()`, :meth:`Item.__eq__()`, and + :meth:`Item.__ne__()` methods for items to be hashable. 
+ + An example implementation follows:: + + class Item(object): + def __init__(self, a, b): + self._a = a + self._b = b + + @property + def a(self): + return self._a + + @property + def b(self): + return self._b + + def _key(self): + return (self._a, self._b) + + def __eq__(self, item): + return self._key() == item._key() + + def __ne__(self, item): + return self._key() != item._key() + + def __hash__(self): + return hash(self._key()) + + based on the OrderedSetQueue below + """ + + def _init(self, maxsize): + queue.Queue._init(self, maxsize) + self._last_item = None + + def _put(self, item): + if item != self._last_item: + queue.Queue._put(self, item) + self._last_item = item + else: + # `put` increments `unfinished_tasks` even if we did not put + # anything into the queue here + self.unfinished_tasks -= 1 + + def _get(self): + item = queue.Queue._get(self) + if item is self._last_item: + self._last_item = None + return item + + +class OrderedSetQueue(queue.Queue): + + """Thread-safe implementation of an ordered set queue. + + Disallows adding a duplicate item while maintaining the + order of items in the queue. The implementation leverages + locking already implemented in the base class + redefining only the primitives. Since the internal queue + is not replaced, the order is maintained. The set is used + merely to check for the existence of an item. + + Queued items must be immutable and hashable so that they can be used + as dictionary keys. You must implement **only read-only properties** and + the :meth:`Item.__hash__()`, :meth:`Item.__eq__()`, and + :meth:`Item.__ne__()` methods for items to be hashable. + + An example implementation follows:: + + class Item(object): + def __init__(self, a, b): + self._a = a + self._b = b + + @property + def a(self): + return self._a + + @property + def b(self): + return self._b + + def _key(self): + return (self._a, self._b) + + def __eq__(self, item): + return self._key() == item._key() + + def __ne__(self, item): + return self._key() != item._key() + + def __hash__(self): + return hash(self._key()) + + :author: lalinsky@gmail.com (Lukáš Lalinský) + :url: http://stackoverflow.com/questions/1581895/how-check-if-a-task-is-already-in-python-queue + """ + + def _init(self, maxsize): + queue.Queue._init(self, maxsize) + self._set_of_items = set() + + def _put(self, item): + if item not in self._set_of_items: + queue.Queue._put(self, item) + self._set_of_items.add(item) + else: + # `put` increments `unfinished_tasks` even if we did not put + # anything into the queue here + self.unfinished_tasks -= 1 + + def _get(self): + item = queue.Queue._get(self) + self._set_of_items.remove(item) + return item + + +if sys.version_info >= (2, 6, 0): + KEY, PREV, NEXT = list(range(3)) + + class OrderedSet(collections.MutableSet): + + """ + Implementation based on a doubly-linked link and an internal dictionary. + This design gives :class:`OrderedSet` the same big-Oh running times as + regular sets including O(1) adds, removes, and lookups as well as + O(n) iteration. + + .. ADMONITION:: Implementation notes + + Runs on Python 2.6 or later (and runs on Python 3.0 or later + without any modifications). 
+ + :author: python@rcn.com (Raymond Hettinger) + :url: http://code.activestate.com/recipes/576694/ + """ + + def __init__(self, iterable=None): + self.end = end = [] + end += [None, end, end] # sentinel node for doubly linked list + self.map = {} # key --> [key, prev, next] + if iterable is not None: + self |= iterable + + def __len__(self): + return len(self.map) + + def __contains__(self, key): + return key in self.map + + def add(self, key): + if key not in self.map: + end = self.end + curr = end[PREV] + curr[NEXT] = end[PREV] = self.map[key] = [key, curr, end] + + def discard(self, key): + if key in self.map: + key, prev, _next = self.map.pop(key) + prev[NEXT] = _next + _next[PREV] = prev + + def __iter__(self): + end = self.end + curr = end[NEXT] + while curr is not end: + yield curr[KEY] + curr = curr[NEXT] + + def __reversed__(self): + end = self.end + curr = end[PREV] + while curr is not end: + yield curr[KEY] + curr = curr[PREV] + + def pop(self, last=True): + if not self: + raise KeyError('set is empty') + key = next(reversed(self)) if last else next(iter(self)) + self.discard(key) + return key + + def __repr__(self): + if not self: + return '%s()' % (self.__class__.__name__,) + return '%s(%r)' % (self.__class__.__name__, list(self)) + + def __eq__(self, other): + if isinstance(other, OrderedSet): + return len(self) == len(other) and list(self) == list(other) + return set(self) == set(other) + + def __del__(self): + self.clear() # remove circular references diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/utils/compat.py b/flask/venv/lib/python3.6/site-packages/watchdog/utils/compat.py new file mode 100644 index 0000000..0f6e794 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/watchdog/utils/compat.py @@ -0,0 +1,29 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2014 Thomas Amland +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import sys + +__all__ = ['queue', 'Event'] + +try: + import queue +except ImportError: + import Queue as queue + + +if sys.version_info < (2, 7): + from watchdog.utils.event_backport import Event +else: + from threading import Event \ No newline at end of file diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/utils/decorators.py b/flask/venv/lib/python3.6/site-packages/watchdog/utils/decorators.py new file mode 100644 index 0000000..abb325c --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/watchdog/utils/decorators.py @@ -0,0 +1,198 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Most of this code was obtained from the Python documentation online. + +"""Decorator utility functions. + +decorators: +- synchronized +- propertyx +- accepts +- returns +- singleton +- attrs +- deprecated +""" + +import functools +import warnings +import threading +import sys + + +def synchronized(lock=None): + """Decorator that synchronizes a method or a function with a mutex lock. + + Example usage: + + @synchronized() + def operation(self, a, b): + ... 
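+
+    When ``lock`` is omitted, a single new ``threading.Lock`` is created at
+    decoration time and shared by every call to that one decorated function.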
+ """ + if lock is None: + lock = threading.Lock() + + def wrapper(function): + def new_function(*args, **kwargs): + lock.acquire() + try: + return function(*args, **kwargs) + finally: + lock.release() + + return new_function + + return wrapper + + +def propertyx(function): + """Decorator to easily create properties in classes. + + Example: + + class Angle(object): + def __init__(self, rad): + self._rad = rad + + @property + def rad(): + def fget(self): + return self._rad + def fset(self, angle): + if isinstance(angle, Angle): + angle = angle.rad + self._rad = float(angle) + + Arguments: + - `function`: The function to be decorated. + """ + keys = ('fget', 'fset', 'fdel') + func_locals = {'doc': function.__doc__} + + def probe_func(frame, event, arg): + if event == 'return': + locals = frame.f_locals + func_locals.update(dict((k, locals.get(k)) for k in keys)) + sys.settrace(None) + return probe_func + + sys.settrace(probe_func) + function() + return property(**func_locals) + + +def accepts(*types): + """Decorator to ensure that the decorated function accepts the given types as arguments. + + Example: + @accepts(int, (int,float)) + @returns((int,float)) + def func(arg1, arg2): + return arg1 * arg2 + """ + + def check_accepts(f): + assert len(types) == f.__code__.co_argcount + + def new_f(*args, **kwds): + for (a, t) in zip(args, types): + assert isinstance(a, t),\ + "arg %r does not match %s" % (a, t) + return f(*args, **kwds) + + new_f.__name__ = f.__name__ + return new_f + + return check_accepts + + +def returns(rtype): + """Decorator to ensure that the decorated function returns the given + type as argument. + + Example: + @accepts(int, (int,float)) + @returns((int,float)) + def func(arg1, arg2): + return arg1 * arg2 + """ + + def check_returns(f): + def new_f(*args, **kwds): + result = f(*args, **kwds) + assert isinstance(result, rtype),\ + "return value %r does not match %s" % (result, rtype) + return result + + new_f.__name__ = f.__name__ + return new_f + + return check_returns + + +def singleton(cls): + """Decorator to ensures a class follows the singleton pattern. + + Example: + @singleton + class MyClass: + ... + """ + instances = {} + + def getinstance(): + if cls not in instances: + instances[cls] = cls() + return instances[cls] + + return getinstance + + +def attrs(**kwds): + """Decorator to add attributes to a function. + + Example: + + @attrs(versionadded="2.2", + author="Guido van Rossum") + def mymethod(f): + ... + """ + + def decorate(f): + for k in kwds: + setattr(f, k, kwds[k]) + return f + + return decorate + + +def deprecated(func): + """This is a decorator which can be used to mark functions + as deprecated. It will result in a warning being emitted + when the function is used. + + ## Usage examples ## + @deprecated + def my_func(): + pass + + @other_decorators_must_be_upper + @deprecated + def my_func(): + pass + """ + + @functools.wraps(func) + def new_func(*args, **kwargs): + warnings.warn_explicit( + "Call to deprecated function %(funcname)s." 
% { + 'funcname': func.__name__, + }, + category=DeprecationWarning, + filename=func.__code__.co_filename, + lineno=func.__code__.co_firstlineno + 1 + ) + return func(*args, **kwargs) + + return new_func diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/utils/delayed_queue.py b/flask/venv/lib/python3.6/site-packages/watchdog/utils/delayed_queue.py new file mode 100644 index 0000000..6d98a50 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/watchdog/utils/delayed_queue.py @@ -0,0 +1,88 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2014 Thomas Amland +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import time +import threading +from collections import deque + + +class DelayedQueue(object): + + def __init__(self, delay): + self.delay = delay + self._lock = threading.Lock() + self._not_empty = threading.Condition(self._lock) + self._queue = deque() + self._closed = False + + def put(self, element): + """Add element to queue.""" + self._lock.acquire() + self._queue.append((element, time.time())) + self._not_empty.notify() + self._lock.release() + + def close(self): + """Close queue, indicating no more items will be added.""" + self._closed = True + # Interrupt the blocking _not_empty.wait() call in get + self._not_empty.acquire() + self._not_empty.notify() + self._not_empty.release() + + def get(self): + """Remove and return an element from the queue, or this queue has been + closed raise the Closed exception. + """ + while True: + # wait for element to be added to queue + self._not_empty.acquire() + while len(self._queue) == 0 and not self._closed: + self._not_empty.wait() + + if self._closed: + self._not_empty.release() + return None + head, insert_time = self._queue[0] + self._not_empty.release() + + # wait for delay + time_left = insert_time + self.delay - time.time() + while time_left > 0: + time.sleep(time_left) + time_left = insert_time + self.delay - time.time() + + # return element if it's still in the queue + self._lock.acquire() + try: + if len(self._queue) > 0 and self._queue[0][0] is head: + self._queue.popleft() + return head + finally: + self._lock.release() + + def remove(self, predicate): + """Remove and return the first items for which predicate is True, + ignoring delay.""" + try: + self._lock.acquire() + for i, (elem, t) in enumerate(self._queue): + if predicate(elem): + del self._queue[i] + return elem + finally: + self._lock.release() + return None diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/utils/dirsnapshot.py b/flask/venv/lib/python3.6/site-packages/watchdog/utils/dirsnapshot.py new file mode 100644 index 0000000..af54c9a --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/watchdog/utils/dirsnapshot.py @@ -0,0 +1,293 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2011 Yesudeep Mangalapilly +# Copyright 2012 Google, Inc. +# Copyright 2014 Thomas Amland +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +:module: watchdog.utils.dirsnapshot +:synopsis: Directory snapshots and comparison. +:author: yesudeep@google.com (Yesudeep Mangalapilly) + +.. ADMONITION:: Where are the moved events? They "disappeared" + + This implementation does not take partition boundaries + into consideration. It will only work when the directory + tree is entirely on the same file system. More specifically, + any part of the code that depends on inode numbers can + break if partition boundaries are crossed. In these cases, + the snapshot diff will represent file/directory movement as + created and deleted events. + +Classes +------- +.. autoclass:: DirectorySnapshot + :members: + :show-inheritance: + +.. autoclass:: DirectorySnapshotDiff + :members: + :show-inheritance: + +""" + +import errno +import os +from stat import S_ISDIR +from watchdog.utils import stat as default_stat + + +class DirectorySnapshotDiff(object): + """ + Compares two directory snapshots and creates an object that represents + the difference between the two snapshots. + + :param ref: + The reference directory snapshot. + :type ref: + :class:`DirectorySnapshot` + :param snapshot: + The directory snapshot which will be compared + with the reference snapshot. + :type snapshot: + :class:`DirectorySnapshot` + """ + + def __init__(self, ref, snapshot): + created = snapshot.paths - ref.paths + deleted = ref.paths - snapshot.paths + + # check that all unchanged paths have the same inode + for path in ref.paths & snapshot.paths: + if ref.inode(path) != snapshot.inode(path): + created.add(path) + deleted.add(path) + + # find moved paths + moved = set() + for path in set(deleted): + inode = ref.inode(path) + new_path = snapshot.path(inode) + if new_path: + # file is not deleted but moved + deleted.remove(path) + moved.add((path, new_path)) + + for path in set(created): + inode = snapshot.inode(path) + old_path = ref.path(inode) + if old_path: + created.remove(path) + moved.add((old_path, path)) + + # find modified paths + # first check paths that have not moved + modified = set() + for path in ref.paths & snapshot.paths: + if ref.inode(path) == snapshot.inode(path): + if ref.mtime(path) != snapshot.mtime(path): + modified.add(path) + + for (old_path, new_path) in moved: + if ref.mtime(old_path) != snapshot.mtime(new_path): + modified.add(old_path) + + self._dirs_created = [path for path in created if snapshot.isdir(path)] + self._dirs_deleted = [path for path in deleted if ref.isdir(path)] + self._dirs_modified = [path for path in modified if ref.isdir(path)] + self._dirs_moved = [(frm, to) for (frm, to) in moved if ref.isdir(frm)] + + self._files_created = list(created - set(self._dirs_created)) + self._files_deleted = list(deleted - set(self._dirs_deleted)) + self._files_modified = list(modified - set(self._dirs_modified)) + self._files_moved = list(moved - set(self._dirs_moved)) + + @property + def files_created(self): + """List of files that were created.""" + return self._files_created + + @property + def files_deleted(self): + """List of files that were deleted.""" + return self._files_deleted + + @property + def 
files_modified(self): + """List of files that were modified.""" + return self._files_modified + + @property + def files_moved(self): + """ + List of files that were moved. + + Each event is a two-tuple the first item of which is the path + that has been renamed to the second item in the tuple. + """ + return self._files_moved + + @property + def dirs_modified(self): + """ + List of directories that were modified. + """ + return self._dirs_modified + + @property + def dirs_moved(self): + """ + List of directories that were moved. + + Each event is a two-tuple the first item of which is the path + that has been renamed to the second item in the tuple. + """ + return self._dirs_moved + + @property + def dirs_deleted(self): + """ + List of directories that were deleted. + """ + return self._dirs_deleted + + @property + def dirs_created(self): + """ + List of directories that were created. + """ + return self._dirs_created + +class DirectorySnapshot(object): + """ + A snapshot of stat information of files in a directory. + + :param path: + The directory path for which a snapshot should be taken. + :type path: + ``str`` + :param recursive: + ``True`` if the entire directory tree should be included in the + snapshot; ``False`` otherwise. + :type recursive: + ``bool`` + :param walker_callback: + .. deprecated:: 0.7.2 + :param stat: + Use custom stat function that returns a stat structure for path. + Currently only st_dev, st_ino, st_mode and st_mtime are needed. + + A function with the signature ``walker_callback(path, stat_info)`` + which will be called for every entry in the directory tree. + :param listdir: + Use custom listdir function. See ``os.listdir`` for details. + """ + + def __init__(self, path, recursive=True, + walker_callback=(lambda p, s: None), + stat=default_stat, + listdir=os.listdir): + self._stat_info = {} + self._inode_to_path = {} + + st = stat(path) + self._stat_info[path] = st + self._inode_to_path[(st.st_ino, st.st_dev)] = path + + def walk(root): + try: + paths = [os.path.join(root, name) for name in listdir(root)] + except OSError as e: + # Directory may have been deleted between finding it in the directory + # list of its parent and trying to delete its contents. If this + # happens we treat it as empty. + if e.errno == errno.ENOENT: + return + else: + raise + entries = [] + for p in paths: + try: + entries.append((p, stat(p))) + except OSError: + continue + for _ in entries: + yield _ + if recursive: + for path, st in entries: + if S_ISDIR(st.st_mode): + for _ in walk(path): + yield _ + + for p, st in walk(path): + i = (st.st_ino, st.st_dev) + self._inode_to_path[i] = p + self._stat_info[p] = st + walker_callback(p, st) + + @property + def paths(self): + """ + Set of file/directory paths in the snapshot. + """ + return set(self._stat_info.keys()) + + def path(self, id): + """ + Returns path for id. None if id is unknown to this snapshot. + """ + return self._inode_to_path.get(id) + + def inode(self, path): + """ Returns an id for path. """ + st = self._stat_info[path] + return (st.st_ino, st.st_dev) + + def isdir(self, path): + return S_ISDIR(self._stat_info[path].st_mode) + + def mtime(self, path): + return self._stat_info[path].st_mtime + + def stat_info(self, path): + """ + Returns a stat information object for the specified path from + the snapshot. + + Attached information is subject to change. Do not use unless + you specify `stat` in constructor. Use :func:`inode`, :func:`mtime`, + :func:`isdir` instead. 
+ + :param path: + The path for which stat information should be obtained + from a snapshot. + """ + return self._stat_info[path] + + def __sub__(self, previous_dirsnap): + """Allow subtracting a DirectorySnapshot object instance from + another. + + :returns: + A :class:`DirectorySnapshotDiff` object. + """ + return DirectorySnapshotDiff(previous_dirsnap, self) + + def __str__(self): + return self.__repr__() + + def __repr__(self): + return str(self._stat_info) diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/utils/echo.py b/flask/venv/lib/python3.6/site-packages/watchdog/utils/echo.py new file mode 100644 index 0000000..12803e0 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/watchdog/utils/echo.py @@ -0,0 +1,157 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# echo.py: Tracing function calls using Python decorators. +# +# Written by Thomas Guest +# Please see http://wordaligned.org/articles/echo +# +# Place into the public domain. + +""" Echo calls made to functions and methods in a module. + +"Echoing" a function call means printing out the name of the function +and the values of its arguments before making the call (which is more +commonly referred to as "tracing", but Python already has a trace module). + +Example: to echo calls made to functions in "my_module" do: + + import echo + import my_module + echo.echo_module(my_module) + +Example: to echo calls made to functions in "my_module.my_class" do: + + echo.echo_class(my_module.my_class) + +Alternatively, echo.echo can be used to decorate functions. Calls to the +decorated function will be echoed. + +Example: + + @echo.echo + def my_function(args): + pass +""" +import inspect +import sys + + +def name(item): + " Return an item's name. " + return item.__name__ + + +def is_classmethod(instancemethod, klass): + " Determine if an instancemethod is a classmethod. " + return inspect.ismethod(instancemethod) and instancemethod.__self__ is klass + +def is_static_method(method, klass): + """Returns True if method is an instance method of klass.""" + for c in klass.mro(): + if name(method) in c.__dict__: + return isinstance(c.__dict__[name(method)], staticmethod) + else: + return False + +def is_class_private_name(name): + " Determine if a name is a class private name. " + # Exclude system defined names such as __init__, __add__ etc + return name.startswith("__") and not name.endswith("__") + + +def method_name(method): + """ Return a method's name. + + This function returns the name the method is accessed by from + outside the class (i.e. it prefixes "private" methods appropriately). + """ + mname = name(method) + if is_class_private_name(mname): + mname = "_%s%s" % (name(method.__self__.__class__), mname) + return mname + + +def format_arg_value(arg_val): + """ Return a string representing a (name, value) pair. + + >>> format_arg_value(('x', (1, 2, 3))) + 'x=(1, 2, 3)' + """ + arg, val = arg_val + return "%s=%r" % (arg, val) + + +def echo(fn, write=sys.stdout.write): + """ Echo calls to a function. + + Returns a decorated version of the input function which "echoes" calls + made to it by writing out the function's name and the arguments it was + called with. 
+ """ + import functools + # Unpack function's arg count, arg names, arg defaults + code = fn.__code__ + argcount = code.co_argcount + argnames = code.co_varnames[:argcount] + fn_defaults = fn.__defaults__ or list() + argdefs = dict(list(zip(argnames[-len(fn_defaults):], fn_defaults))) + + @functools.wraps(fn) + def wrapped(*v, **k): + # Collect function arguments by chaining together positional, + # defaulted, extra positional and keyword arguments. + positional = list(map(format_arg_value, list(zip(argnames, v)))) + defaulted = [format_arg_value((a, argdefs[a])) + for a in argnames[len(v):] if a not in k] + nameless = list(map(repr, v[argcount:])) + keyword = list(map(format_arg_value, list(k.items()))) + args = positional + defaulted + nameless + keyword + write("%s(%s)\n" % (name(fn), ", ".join(args))) + return fn(*v, **k) + + return wrapped + + +def echo_instancemethod(klass, method, write=sys.stdout.write): + """ Change an instancemethod so that calls to it are echoed. + + Replacing a classmethod is a little more tricky. + See: http://www.python.org/doc/current/ref/types.html + """ + mname = method_name(method) + never_echo = "__str__", "__repr__", # Avoid recursion printing method calls + if mname in never_echo: + pass + elif is_classmethod(method, klass): + setattr(klass, mname, classmethod(echo(method.__func__, write))) + else: + setattr(klass, mname, echo(method, write)) + +def echo_class(klass, write=sys.stdout.write): + """ Echo calls to class methods and static functions + """ + for _, method in inspect.getmembers(klass, inspect.ismethod): + #In python 3 only class methods are returned here, but in python2 instance methods are too. + echo_instancemethod(klass, method, write) + for _, fn in inspect.getmembers(klass, inspect.isfunction): + if is_static_method(fn, klass): + setattr(klass, name(fn), staticmethod(echo(fn, write))) + else: + #It's not a class or a static method, so it must be an instance method. + #This should only be called in python 3, because in python 3 instance methods are considered functions. + echo_instancemethod(klass, fn, write) + +def echo_module(mod, write=sys.stdout.write): + """ Echo calls to functions and methods in a module. 
+ """ + for fname, fn in inspect.getmembers(mod, inspect.isfunction): + setattr(mod, fname, echo(fn, write)) + for _, klass in inspect.getmembers(mod, inspect.isclass): + echo_class(klass, write) + +if __name__ == "__main__": + import doctest + + optionflags = doctest.ELLIPSIS + doctest.testfile('echoexample.txt', optionflags=optionflags) + doctest.testmod(optionflags=optionflags) diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/utils/event_backport.py b/flask/venv/lib/python3.6/site-packages/watchdog/utils/event_backport.py new file mode 100644 index 0000000..5c136e4 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/watchdog/utils/event_backport.py @@ -0,0 +1,41 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Backport of Event from py2.7 (method wait in py2.6 returns None) + +from threading import Condition, Lock + + +class Event(object): + + def __init__(self,): + self.__cond = Condition(Lock()) + self.__flag = False + + def isSet(self): + return self.__flag + + is_set = isSet + + def set(self): + self.__cond.acquire() + try: + self.__flag = True + self.__cond.notify_all() + finally: + self.__cond.release() + + def clear(self): + self.__cond.acquire() + try: + self.__flag = False + finally: + self.__cond.release() + + def wait(self, timeout=None): + self.__cond.acquire() + try: + if not self.__flag: + self.__cond.wait(timeout) + return self.__flag + finally: + self.__cond.release() diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/utils/importlib2.py b/flask/venv/lib/python3.6/site-packages/watchdog/utils/importlib2.py new file mode 100644 index 0000000..5ad3ec5 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/watchdog/utils/importlib2.py @@ -0,0 +1,40 @@ +# The MIT License (MIT) + +# Copyright (c) 2013 Peter M. Elias + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE + + +def import_module(target, relative_to=None): + target_parts = target.split('.') + target_depth = target_parts.count('') + target_path = target_parts[target_depth:] + target = target[target_depth:] + fromlist = [target] + if target_depth and relative_to: + relative_parts = relative_to.split('.') + relative_to = '.'.join(relative_parts[:-(target_depth - 1) or None]) + if len(target_path) > 1: + relative_to = '.'.join(filter(None, [relative_to]) + target_path[:-1]) + fromlist = target_path[-1:] + target = fromlist[0] + elif not relative_to: + fromlist = [] + mod = __import__(relative_to or target, globals(), locals(), fromlist) + return getattr(mod, target, mod) diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/utils/platform.py b/flask/venv/lib/python3.6/site-packages/watchdog/utils/platform.py new file mode 100644 index 0000000..239c6a2 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/watchdog/utils/platform.py @@ -0,0 +1,57 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2011 Yesudeep Mangalapilly +# Copyright 2012 Google, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
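+# Rough illustration of the helpers defined below (the exact value of
+# sys.platform is an assumption; e.g. on a typical Linux interpreter):
+#
+#     >>> get_platform_name()
+#     'linux'
+#     >>> is_linux(), is_windows()
+#     (True, False)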
+ + +import sys + +PLATFORM_WINDOWS = 'windows' +PLATFORM_LINUX = 'linux' +PLATFORM_BSD = 'bsd' +PLATFORM_DARWIN = 'darwin' +PLATFORM_UNKNOWN = 'unknown' + + +def get_platform_name(): + if sys.platform.startswith("win"): + return PLATFORM_WINDOWS + elif sys.platform.startswith('darwin'): + return PLATFORM_DARWIN + elif sys.platform.startswith('linux'): + return PLATFORM_LINUX + elif sys.platform.startswith(('dragonfly', 'freebsd', 'netbsd', 'openbsd', )): + return PLATFORM_BSD + else: + return PLATFORM_UNKNOWN + +__platform__ = get_platform_name() + + +def is_linux(): + return __platform__ == PLATFORM_LINUX + + +def is_bsd(): + return __platform__ == PLATFORM_BSD + + +def is_darwin(): + return __platform__ == PLATFORM_DARWIN + + +def is_windows(): + return __platform__ == PLATFORM_WINDOWS diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/utils/unicode_paths.py b/flask/venv/lib/python3.6/site-packages/watchdog/utils/unicode_paths.py new file mode 100644 index 0000000..501a2f1 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/watchdog/utils/unicode_paths.py @@ -0,0 +1,64 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright (c) 2013 Will Bond +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + + +import sys + +from watchdog.utils import platform + +try: + # Python 2 + str_cls = unicode + bytes_cls = str +except NameError: + # Python 3 + str_cls = str + bytes_cls = bytes + + +# This is used by Linux when the locale seems to be improperly set. UTF-8 tends +# to be the encoding used by all distros, so this is a good fallback. 
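+# Sketch of the expected round-trip behaviour of encode()/decode() below,
+# assuming the filesystem encoding resolves to UTF-8:
+#
+#     >>> encode(u'caf\xe9.txt')
+#     b'caf\xc3\xa9.txt'
+#     >>> decode(b'caf\xc3\xa9.txt')
+#     'café.txt'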
+fs_fallback_encoding = 'utf-8' +fs_encoding = sys.getfilesystemencoding() or fs_fallback_encoding + + +def encode(path): + if isinstance(path, str_cls): + try: + path = path.encode(fs_encoding, 'strict') + except UnicodeEncodeError: + if not platform.is_linux(): + raise + path = path.encode(fs_fallback_encoding, 'strict') + return path + + +def decode(path): + if isinstance(path, bytes_cls): + try: + path = path.decode(fs_encoding, 'strict') + except UnicodeDecodeError: + if not platform.is_linux(): + raise + path = path.decode(fs_fallback_encoding, 'strict') + return path diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/utils/win32stat.py b/flask/venv/lib/python3.6/site-packages/watchdog/utils/win32stat.py new file mode 100644 index 0000000..c18d66f --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/watchdog/utils/win32stat.py @@ -0,0 +1,123 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2014 Thomas Amland +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +:module: watchdog.utils.win32stat +:synopsis: Implementation of stat with st_ino and st_dev support. + +Functions +--------- + +.. autofunction:: stat + +""" + +import ctypes +import ctypes.wintypes +import stat as stdstat +from collections import namedtuple + + +INVALID_HANDLE_VALUE = ctypes.c_void_p(-1).value +OPEN_EXISTING = 3 +FILE_READ_ATTRIBUTES = 0x80 +FILE_ATTRIBUTE_NORMAL = 0x80 +FILE_ATTRIBUTE_READONLY = 0x1 +FILE_ATTRIBUTE_DIRECTORY = 0x10 +FILE_FLAG_BACKUP_SEMANTICS = 0x02000000 +FILE_FLAG_OPEN_REPARSE_POINT = 0x00200000 + + +class FILETIME(ctypes.Structure): + _fields_ = [("dwLowDateTime", ctypes.wintypes.DWORD), + ("dwHighDateTime", ctypes.wintypes.DWORD)] + + +class BY_HANDLE_FILE_INFORMATION(ctypes.Structure): + _fields_ = [('dwFileAttributes', ctypes.wintypes.DWORD), + ('ftCreationTime', FILETIME), + ('ftLastAccessTime', FILETIME), + ('ftLastWriteTime', FILETIME), + ('dwVolumeSerialNumber', ctypes.wintypes.DWORD), + ('nFileSizeHigh', ctypes.wintypes.DWORD), + ('nFileSizeLow', ctypes.wintypes.DWORD), + ('nNumberOfLinks', ctypes.wintypes.DWORD), + ('nFileIndexHigh', ctypes.wintypes.DWORD), + ('nFileIndexLow', ctypes.wintypes.DWORD)] + + +CreateFile = ctypes.windll.kernel32.CreateFileW +CreateFile.restype = ctypes.wintypes.HANDLE +CreateFile.argtypes = ( + ctypes.c_wchar_p, + ctypes.wintypes.DWORD, + ctypes.wintypes.DWORD, + ctypes.c_void_p, + ctypes.wintypes.DWORD, + ctypes.wintypes.DWORD, + ctypes.wintypes.HANDLE, +) + +GetFileInformationByHandle = ctypes.windll.kernel32.GetFileInformationByHandle +GetFileInformationByHandle.restype = ctypes.wintypes.BOOL +GetFileInformationByHandle.argtypes = ( + ctypes.wintypes.HANDLE, + ctypes.wintypes.POINTER(BY_HANDLE_FILE_INFORMATION), +) + +CloseHandle = ctypes.windll.kernel32.CloseHandle +CloseHandle.restype = ctypes.wintypes.BOOL +CloseHandle.argtypes = (ctypes.wintypes.HANDLE,) + + +StatResult = namedtuple('StatResult', 'st_dev st_ino st_mode st_mtime') + +def _to_mode(attr): + m = 0 + if (attr & FILE_ATTRIBUTE_DIRECTORY): + m |= stdstat.S_IFDIR | 0o111 + 
else: + m |= stdstat.S_IFREG + if (attr & FILE_ATTRIBUTE_READONLY): + m |= 0o444 + else: + m |= 0o666 + return m + +def _to_unix_time(ft): + t = (ft.dwHighDateTime) << 32 | ft.dwLowDateTime + return (t / 10000000) - 11644473600 + +def stat(path): + hfile = CreateFile(path, + FILE_READ_ATTRIBUTES, + 0, + None, + OPEN_EXISTING, + FILE_ATTRIBUTE_NORMAL | FILE_FLAG_BACKUP_SEMANTICS | FILE_FLAG_OPEN_REPARSE_POINT, + None) + if hfile == INVALID_HANDLE_VALUE: + raise ctypes.WinError() + info = BY_HANDLE_FILE_INFORMATION() + r = GetFileInformationByHandle(hfile, info) + CloseHandle(hfile) + if not r: + raise ctypes.WinError() + return StatResult(st_dev=info.dwVolumeSerialNumber, + st_ino=(info.nFileIndexHigh << 32) + info.nFileIndexLow, + st_mode=_to_mode(info.dwFileAttributes), + st_mtime=_to_unix_time(info.ftLastWriteTime) + ) diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/version.py b/flask/venv/lib/python3.6/site-packages/watchdog/version.py new file mode 100644 index 0000000..295497c --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/watchdog/version.py @@ -0,0 +1,28 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2011 Yesudeep Mangalapilly +# Copyright 2012 Google, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +# When updating this version number, please update the +# ``docs/source/global.rst.inc`` file as well. +VERSION_MAJOR = 0 +VERSION_MINOR = 9 +VERSION_BUILD = 0 +VERSION_INFO = (VERSION_MAJOR, VERSION_MINOR, VERSION_BUILD) +VERSION_STRING = "%d.%d.%d" % VERSION_INFO + +__version__ = VERSION_INFO diff --git a/flask/venv/lib/python3.6/site-packages/watchdog/watchmedo.py b/flask/venv/lib/python3.6/site-packages/watchdog/watchmedo.py new file mode 100644 index 0000000..ce891f8 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/watchdog/watchmedo.py @@ -0,0 +1,577 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2011 Yesudeep Mangalapilly +# Copyright 2012 Google, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +:module: watchdog.watchmedo +:author: yesudeep@google.com (Yesudeep Mangalapilly) +:synopsis: ``watchmedo`` shell script utility. 
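+
+Illustrative invocations (option names are taken from the subcommand
+definitions in this module; paths and patterns are placeholders)::
+
+    watchmedo log --patterns="*.py" --recursive .
+    watchmedo shell-command --patterns="*.py" --command='echo "${watch_src_path}"' .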
+""" + +import os.path +import sys +import yaml +import time +import logging + +try: + from cStringIO import StringIO +except ImportError: + try: + from StringIO import StringIO + except ImportError: + from io import StringIO + +from argh import arg, aliases, ArghParser, expects_obj +from watchdog.version import VERSION_STRING +from watchdog.utils import load_class + + +logging.basicConfig(level=logging.INFO) + +CONFIG_KEY_TRICKS = 'tricks' +CONFIG_KEY_PYTHON_PATH = 'python-path' + + +def path_split(pathname_spec, separator=os.path.sep): + """ + Splits a pathname specification separated by an OS-dependent separator. + + :param pathname_spec: + The pathname specification. + :param separator: + (OS Dependent) `:` on Unix and `;` on Windows or user-specified. + """ + return list(pathname_spec.split(separator)) + + +def add_to_sys_path(pathnames, index=0): + """ + Adds specified paths at specified index into the sys.path list. + + :param paths: + A list of paths to add to the sys.path + :param index: + (Default 0) The index in the sys.path list where the paths will be + added. + """ + for pathname in pathnames[::-1]: + sys.path.insert(index, pathname) + + +def load_config(tricks_file_pathname): + """ + Loads the YAML configuration from the specified file. + + :param tricks_file_path: + The path to the tricks configuration file. + :returns: + A dictionary of configuration information. + """ + f = open(tricks_file_pathname, 'rb') + content = f.read() + f.close() + config = yaml.load(content) + return config + + +def parse_patterns(patterns_spec, ignore_patterns_spec, separator=';'): + """ + Parses pattern argument specs and returns a two-tuple of + (patterns, ignore_patterns). + """ + patterns = patterns_spec.split(separator) + ignore_patterns = ignore_patterns_spec.split(separator) + if ignore_patterns == ['']: + ignore_patterns = [] + return (patterns, ignore_patterns) + + +def observe_with(observer, event_handler, pathnames, recursive): + """ + Single observer thread with a scheduled path and event handler. + + :param observer: + The observer thread. + :param event_handler: + Event handler which will be called in response to file system events. + :param pathnames: + A list of pathnames to monitor. + :param recursive: + ``True`` if recursive; ``False`` otherwise. + """ + for pathname in set(pathnames): + observer.schedule(event_handler, pathname, recursive) + observer.start() + try: + while True: + time.sleep(1) + except KeyboardInterrupt: + observer.stop() + observer.join() + + +def schedule_tricks(observer, tricks, pathname, recursive): + """ + Schedules tricks with the specified observer and for the given watch + path. + + :param observer: + The observer thread into which to schedule the trick and watch. + :param tricks: + A list of tricks. + :param pathname: + A path name which should be watched. + :param recursive: + ``True`` if recursive; ``False`` otherwise. 
+ """ + for trick in tricks: + for name, value in list(trick.items()): + TrickClass = load_class(name) + handler = TrickClass(**value) + trick_pathname = getattr(handler, 'source_directory', None) or pathname + observer.schedule(handler, trick_pathname, recursive) + + +@aliases('tricks') +@arg('files', + nargs='*', + help='perform tricks from given file') +@arg('--python-path', + default='.', + help='paths separated by %s to add to the python path' % os.path.sep) +@arg('--interval', + '--timeout', + dest='timeout', + default=1.0, + help='use this as the polling interval/blocking timeout') +@arg('--recursive', + default=True, + help='recursively monitor paths') +@expects_obj +def tricks_from(args): + """ + Subcommand to execute tricks from a tricks configuration file. + + :param args: + Command line argument options. + """ + from watchdog.observers import Observer + + add_to_sys_path(path_split(args.python_path)) + observers = [] + for tricks_file in args.files: + observer = Observer(timeout=args.timeout) + + if not os.path.exists(tricks_file): + raise IOError("cannot find tricks file: %s" % tricks_file) + + config = load_config(tricks_file) + + try: + tricks = config[CONFIG_KEY_TRICKS] + except KeyError: + raise KeyError("No `%s' key specified in %s." % ( + CONFIG_KEY_TRICKS, tricks_file)) + + if CONFIG_KEY_PYTHON_PATH in config: + add_to_sys_path(config[CONFIG_KEY_PYTHON_PATH]) + + dir_path = os.path.dirname(tricks_file) + if not dir_path: + dir_path = os.path.relpath(os.getcwd()) + schedule_tricks(observer, tricks, dir_path, args.recursive) + observer.start() + observers.append(observer) + + try: + while True: + time.sleep(1) + except KeyboardInterrupt: + for o in observers: + o.unschedule_all() + o.stop() + for o in observers: + o.join() + + +@aliases('generate-tricks-yaml') +@arg('trick_paths', + nargs='*', + help='Dotted paths for all the tricks you want to generate') +@arg('--python-path', + default='.', + help='paths separated by %s to add to the python path' % os.path.sep) +@arg('--append-to-file', + default=None, + help='appends the generated tricks YAML to a file; \ +if not specified, prints to standard output') +@arg('-a', + '--append-only', + dest='append_only', + default=False, + help='if --append-to-file is not specified, produces output for \ +appending instead of a complete tricks yaml file.') +@expects_obj +def tricks_generate_yaml(args): + """ + Subcommand to generate Yaml configuration for tricks named on the command + line. + + :param args: + Command line argument options. + """ + python_paths = path_split(args.python_path) + add_to_sys_path(python_paths) + output = StringIO() + + for trick_path in args.trick_paths: + TrickClass = load_class(trick_path) + output.write(TrickClass.generate_yaml()) + + content = output.getvalue() + output.close() + + header = yaml.dump({CONFIG_KEY_PYTHON_PATH: python_paths}) + header += "%s:\n" % CONFIG_KEY_TRICKS + if args.append_to_file is None: + # Output to standard output. 
+ if not args.append_only: + content = header + content + sys.stdout.write(content) + else: + if not os.path.exists(args.append_to_file): + content = header + content + output = open(args.append_to_file, 'ab') + output.write(content) + output.close() + + +@arg('directories', + nargs='*', + default='.', + help='directories to watch.') +@arg('-p', + '--pattern', + '--patterns', + dest='patterns', + default='*', + help='matches event paths with these patterns (separated by ;).') +@arg('-i', + '--ignore-pattern', + '--ignore-patterns', + dest='ignore_patterns', + default='', + help='ignores event paths with these patterns (separated by ;).') +@arg('-D', + '--ignore-directories', + dest='ignore_directories', + default=False, + help='ignores events for directories') +@arg('-R', + '--recursive', + dest='recursive', + default=False, + help='monitors the directories recursively') +@arg('--interval', + '--timeout', + dest='timeout', + default=1.0, + help='use this as the polling interval/blocking timeout') +@arg('--trace', + default=False, + help='dumps complete dispatching trace') +@arg('--debug-force-polling', + default=False, + help='[debug] forces polling') +@arg('--debug-force-kqueue', + default=False, + help='[debug] forces BSD kqueue(2)') +@arg('--debug-force-winapi', + default=False, + help='[debug] forces Windows API') +@arg('--debug-force-winapi-async', + default=False, + help='[debug] forces Windows API + I/O completion') +@arg('--debug-force-fsevents', + default=False, + help='[debug] forces Mac OS X FSEvents') +@arg('--debug-force-inotify', + default=False, + help='[debug] forces Linux inotify(7)') +@expects_obj +def log(args): + """ + Subcommand to log file system events to the console. + + :param args: + Command line argument options. + """ + from watchdog.utils import echo + from watchdog.tricks import LoggerTrick + + if args.trace: + echo.echo_class(LoggerTrick) + + patterns, ignore_patterns =\ + parse_patterns(args.patterns, args.ignore_patterns) + handler = LoggerTrick(patterns=patterns, + ignore_patterns=ignore_patterns, + ignore_directories=args.ignore_directories) + if args.debug_force_polling: + from watchdog.observers.polling import PollingObserver as Observer + elif args.debug_force_kqueue: + from watchdog.observers.kqueue import KqueueObserver as Observer + elif args.debug_force_winapi_async: + from watchdog.observers.read_directory_changes_async import\ + WindowsApiAsyncObserver as Observer + elif args.debug_force_winapi: + from watchdog.observers.read_directory_changes import\ + WindowsApiObserver as Observer + elif args.debug_force_inotify: + from watchdog.observers.inotify import InotifyObserver as Observer + elif args.debug_force_fsevents: + from watchdog.observers.fsevents import FSEventsObserver as Observer + else: + # Automatically picks the most appropriate observer for the platform + # on which it is running. + from watchdog.observers import Observer + observer = Observer(timeout=args.timeout) + observe_with(observer, handler, args.directories, args.recursive) + + +@arg('directories', + nargs='*', + default='.', + help='directories to watch') +@arg('-c', + '--command', + dest='command', + default=None, + help='''shell command executed in response to matching events. 
+These interpolation variables are available to your command string:: + + ${watch_src_path} - event source path; + ${watch_dest_path} - event destination path (for moved events); + ${watch_event_type} - event type; + ${watch_object} - ``file`` or ``directory`` + +Note:: + Please ensure you do not use double quotes (") to quote + your command string. That will force your shell to + interpolate before the command is processed by this + subcommand. + +Example option usage:: + + --command='echo "${watch_src_path}"' +''') +@arg('-p', + '--pattern', + '--patterns', + dest='patterns', + default='*', + help='matches event paths with these patterns (separated by ;).') +@arg('-i', + '--ignore-pattern', + '--ignore-patterns', + dest='ignore_patterns', + default='', + help='ignores event paths with these patterns (separated by ;).') +@arg('-D', + '--ignore-directories', + dest='ignore_directories', + default=False, + help='ignores events for directories') +@arg('-R', + '--recursive', + dest='recursive', + default=False, + help='monitors the directories recursively') +@arg('--interval', + '--timeout', + dest='timeout', + default=1.0, + help='use this as the polling interval/blocking timeout') +@arg('-w', '--wait', + dest='wait_for_process', + action='store_true', + default=False, + help="wait for process to finish to avoid multiple simultaneous instances") +@arg('-W', '--drop', + dest='drop_during_process', + action='store_true', + default=False, + help="Ignore events that occur while command is still being executed " \ + "to avoid multiple simultaneous instances") +@arg('--debug-force-polling', + default=False, + help='[debug] forces polling') +@expects_obj +def shell_command(args): + """ + Subcommand to execute shell commands in response to file system events. + + :param args: + Command line argument options. + """ + from watchdog.tricks import ShellCommandTrick + + if not args.command: + args.command = None + + if args.debug_force_polling: + from watchdog.observers.polling import PollingObserver as Observer + else: + from watchdog.observers import Observer + + patterns, ignore_patterns = parse_patterns(args.patterns, + args.ignore_patterns) + handler = ShellCommandTrick(shell_command=args.command, + patterns=patterns, + ignore_patterns=ignore_patterns, + ignore_directories=args.ignore_directories, + wait_for_process=args.wait_for_process, + drop_during_process=args.drop_during_process) + observer = Observer(timeout=args.timeout) + observe_with(observer, handler, args.directories, args.recursive) + + +@arg('command', + help='''Long-running command to run in a subprocess. +''') +@arg('command_args', + metavar='arg', + nargs='*', + help='''Command arguments. + +Note: Use -- before the command arguments, otherwise watchmedo will +try to interpret them. +''') +@arg('-d', + '--directory', + dest='directories', + metavar='directory', + action='append', + help='Directory to watch. 
Use another -d or --directory option ' + 'for each directory.') +@arg('-p', + '--pattern', + '--patterns', + dest='patterns', + default='*', + help='matches event paths with these patterns (separated by ;).') +@arg('-i', + '--ignore-pattern', + '--ignore-patterns', + dest='ignore_patterns', + default='', + help='ignores event paths with these patterns (separated by ;).') +@arg('-D', + '--ignore-directories', + dest='ignore_directories', + default=False, + help='ignores events for directories') +@arg('-R', + '--recursive', + dest='recursive', + default=False, + help='monitors the directories recursively') +@arg('--interval', + '--timeout', + dest='timeout', + default=1.0, + help='use this as the polling interval/blocking timeout') +@arg('--signal', + dest='signal', + default='SIGINT', + help='stop the subprocess with this signal (default SIGINT)') +@arg('--kill-after', + dest='kill_after', + default=10.0, + help='when stopping, kill the subprocess after the specified timeout ' + '(default 10)') +@expects_obj +def auto_restart(args): + """ + Subcommand to start a long-running subprocess and restart it + on matched events. + + :param args: + Command line argument options. + """ + from watchdog.observers import Observer + from watchdog.tricks import AutoRestartTrick + import signal + import re + + if not args.directories: + args.directories = ['.'] + + # Allow either signal name or number. + if re.match('^SIG[A-Z]+$', args.signal): + stop_signal = getattr(signal, args.signal) + else: + stop_signal = int(args.signal) + + # Handle SIGTERM in the same manner as SIGINT so that + # this program has a chance to stop the child process. + def handle_sigterm(_signum, _frame): + raise KeyboardInterrupt() + + signal.signal(signal.SIGTERM, handle_sigterm) + + patterns, ignore_patterns = parse_patterns(args.patterns, + args.ignore_patterns) + command = [args.command] + command.extend(args.command_args) + handler = AutoRestartTrick(command=command, + patterns=patterns, + ignore_patterns=ignore_patterns, + ignore_directories=args.ignore_directories, + stop_signal=stop_signal, + kill_after=args.kill_after) + handler.start() + observer = Observer(timeout=args.timeout) + observe_with(observer, handler, args.directories, args.recursive) + handler.stop() + + +epilog = """Copyright 2011 Yesudeep Mangalapilly . +Copyright 2012 Google, Inc. + +Licensed under the terms of the Apache license, version 2.0. Please see +LICENSE in the source code for more information.""" + +parser = ArghParser(epilog=epilog) +parser.add_commands([tricks_from, + tricks_generate_yaml, + log, + shell_command, + auto_restart]) +parser.add_argument('--version', + action='version', + version='%(prog)s ' + VERSION_STRING) + + +def main(): + """Entry-point function.""" + parser.dispatch() + + +if __name__ == '__main__': + main() diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/__init__.py b/flask/venv/lib/python3.6/site-packages/werkzeug/__init__.py new file mode 100644 index 0000000..56404f4 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/werkzeug/__init__.py @@ -0,0 +1,151 @@ +# -*- coding: utf-8 -*- +""" + werkzeug + ~~~~~~~~ + + Werkzeug is the Swiss Army knife of Python web development. + + It provides useful classes and functions for any WSGI application to make + the life of a python web developer much easier. All of the provided + classes are independent from each other so you can mix it with any other + library. + + + :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. 
+ :license: BSD, see LICENSE for more details. +""" +from types import ModuleType +import sys + +from werkzeug._compat import iteritems + +__version__ = '0.14.1' + + +# This import magic raises concerns quite often which is why the implementation +# and motivation is explained here in detail now. +# +# The majority of the functions and classes provided by Werkzeug work on the +# HTTP and WSGI layer. There is no useful grouping for those which is why +# they are all importable from "werkzeug" instead of the modules where they are +# implemented. The downside of that is, that now everything would be loaded at +# once, even if unused. +# +# The implementation of a lazy-loading module in this file replaces the +# werkzeug package when imported from within. Attribute access to the werkzeug +# module will then lazily import from the modules that implement the objects. + + +# import mapping to objects in other modules +all_by_module = { + 'werkzeug.debug': ['DebuggedApplication'], + 'werkzeug.local': ['Local', 'LocalManager', 'LocalProxy', 'LocalStack', + 'release_local'], + 'werkzeug.serving': ['run_simple'], + 'werkzeug.test': ['Client', 'EnvironBuilder', 'create_environ', + 'run_wsgi_app'], + 'werkzeug.testapp': ['test_app'], + 'werkzeug.exceptions': ['abort', 'Aborter'], + 'werkzeug.urls': ['url_decode', 'url_encode', 'url_quote', + 'url_quote_plus', 'url_unquote', 'url_unquote_plus', + 'url_fix', 'Href', 'iri_to_uri', 'uri_to_iri'], + 'werkzeug.formparser': ['parse_form_data'], + 'werkzeug.utils': ['escape', 'environ_property', 'append_slash_redirect', + 'redirect', 'cached_property', 'import_string', + 'dump_cookie', 'parse_cookie', 'unescape', + 'format_string', 'find_modules', 'header_property', + 'html', 'xhtml', 'HTMLBuilder', 'validate_arguments', + 'ArgumentValidationError', 'bind_arguments', + 'secure_filename'], + 'werkzeug.wsgi': ['get_current_url', 'get_host', 'pop_path_info', + 'peek_path_info', 'SharedDataMiddleware', + 'DispatcherMiddleware', 'ClosingIterator', 'FileWrapper', + 'make_line_iter', 'LimitedStream', 'responder', + 'wrap_file', 'extract_path_info'], + 'werkzeug.datastructures': ['MultiDict', 'CombinedMultiDict', 'Headers', + 'EnvironHeaders', 'ImmutableList', + 'ImmutableDict', 'ImmutableMultiDict', + 'TypeConversionDict', + 'ImmutableTypeConversionDict', 'Accept', + 'MIMEAccept', 'CharsetAccept', + 'LanguageAccept', 'RequestCacheControl', + 'ResponseCacheControl', 'ETags', 'HeaderSet', + 'WWWAuthenticate', 'Authorization', + 'FileMultiDict', 'CallbackDict', 'FileStorage', + 'OrderedMultiDict', 'ImmutableOrderedMultiDict' + ], + 'werkzeug.useragents': ['UserAgent'], + 'werkzeug.http': ['parse_etags', 'parse_date', 'http_date', 'cookie_date', + 'parse_cache_control_header', 'is_resource_modified', + 'parse_accept_header', 'parse_set_header', 'quote_etag', + 'unquote_etag', 'generate_etag', 'dump_header', + 'parse_list_header', 'parse_dict_header', + 'parse_authorization_header', + 'parse_www_authenticate_header', 'remove_entity_headers', + 'is_entity_header', 'remove_hop_by_hop_headers', + 'parse_options_header', 'dump_options_header', + 'is_hop_by_hop_header', 'unquote_header_value', + 'quote_header_value', 'HTTP_STATUS_CODES'], + 'werkzeug.wrappers': ['BaseResponse', 'BaseRequest', 'Request', 'Response', + 'AcceptMixin', 'ETagRequestMixin', + 'ETagResponseMixin', 'ResponseStreamMixin', + 'CommonResponseDescriptorsMixin', 'UserAgentMixin', + 'AuthorizationMixin', 'WWWAuthenticateMixin', + 'CommonRequestDescriptorsMixin'], + 'werkzeug.security': 
['generate_password_hash', 'check_password_hash'], + # the undocumented easteregg ;-) + 'werkzeug._internal': ['_easteregg'] +} + +# modules that should be imported when accessed as attributes of werkzeug +attribute_modules = frozenset(['exceptions', 'routing']) + + +object_origins = {} +for module, items in iteritems(all_by_module): + for item in items: + object_origins[item] = module + + +class module(ModuleType): + + """Automatically import objects from the modules.""" + + def __getattr__(self, name): + if name in object_origins: + module = __import__(object_origins[name], None, None, [name]) + for extra_name in all_by_module[module.__name__]: + setattr(self, extra_name, getattr(module, extra_name)) + return getattr(module, name) + elif name in attribute_modules: + __import__('werkzeug.' + name) + return ModuleType.__getattribute__(self, name) + + def __dir__(self): + """Just show what we want to show.""" + result = list(new_module.__all__) + result.extend(('__file__', '__doc__', '__all__', + '__docformat__', '__name__', '__path__', + '__package__', '__version__')) + return result + +# keep a reference to this module so that it's not garbage collected +old_module = sys.modules['werkzeug'] + + +# setup the new module and patch it into the dict of loaded modules +new_module = sys.modules['werkzeug'] = module('werkzeug') +new_module.__dict__.update({ + '__file__': __file__, + '__package__': 'werkzeug', + '__path__': __path__, + '__doc__': __doc__, + '__version__': __version__, + '__all__': tuple(object_origins) + tuple(attribute_modules), + '__docformat__': 'restructuredtext en' +}) + + +# Due to bootstrapping issues we need to import exceptions here. +# Don't ask :-( +__import__('werkzeug.exceptions') diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/__init__.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..6072b53 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/__init__.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/_compat.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/_compat.cpython-36.pyc new file mode 100644 index 0000000..fd2bd54 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/_compat.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/_internal.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/_internal.cpython-36.pyc new file mode 100644 index 0000000..0836734 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/_internal.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/_reloader.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/_reloader.cpython-36.pyc new file mode 100644 index 0000000..4695eac Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/_reloader.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/datastructures.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/datastructures.cpython-36.pyc new file mode 100644 index 0000000..5c3a15a Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/datastructures.cpython-36.pyc differ diff --git 
a/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/exceptions.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/exceptions.cpython-36.pyc new file mode 100644 index 0000000..fb77380 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/exceptions.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/filesystem.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/filesystem.cpython-36.pyc new file mode 100644 index 0000000..11ed599 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/filesystem.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/formparser.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/formparser.cpython-36.pyc new file mode 100644 index 0000000..9acd2b6 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/formparser.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/http.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/http.cpython-36.pyc new file mode 100644 index 0000000..bfb2058 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/http.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/local.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/local.cpython-36.pyc new file mode 100644 index 0000000..2bf44dc Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/local.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/posixemulation.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/posixemulation.cpython-36.pyc new file mode 100644 index 0000000..0608c3d Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/posixemulation.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/routing.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/routing.cpython-36.pyc new file mode 100644 index 0000000..31ff9e7 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/routing.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/script.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/script.cpython-36.pyc new file mode 100644 index 0000000..036f08b Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/script.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/security.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/security.cpython-36.pyc new file mode 100644 index 0000000..8986ec1 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/security.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/serving.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/serving.cpython-36.pyc new file mode 100644 index 0000000..3ce7f5a Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/serving.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/test.cpython-36.pyc 
b/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/test.cpython-36.pyc new file mode 100644 index 0000000..1296509 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/test.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/testapp.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/testapp.cpython-36.pyc new file mode 100644 index 0000000..22ffcd5 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/testapp.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/urls.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/urls.cpython-36.pyc new file mode 100644 index 0000000..7f3cef8 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/urls.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/useragents.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/useragents.cpython-36.pyc new file mode 100644 index 0000000..a556748 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/useragents.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/utils.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/utils.cpython-36.pyc new file mode 100644 index 0000000..35c8263 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/utils.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/websocket.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/websocket.cpython-36.pyc new file mode 100644 index 0000000..b499b61 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/websocket.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/wrappers.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/wrappers.cpython-36.pyc new file mode 100644 index 0000000..20b86ac Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/wrappers.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/wsgi.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/wsgi.cpython-36.pyc new file mode 100644 index 0000000..e022538 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/werkzeug/__pycache__/wsgi.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/_compat.py b/flask/venv/lib/python3.6/site-packages/werkzeug/_compat.py new file mode 100644 index 0000000..fda038b --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/werkzeug/_compat.py @@ -0,0 +1,206 @@ +# flake8: noqa +# This whole file is full of lint errors +import codecs +import sys +import operator +import functools +import warnings + +try: + import builtins +except ImportError: + import __builtin__ as builtins + + +PY2 = sys.version_info[0] == 2 +WIN = sys.platform.startswith('win') + +_identity = lambda x: x + +if PY2: + unichr = unichr + text_type = unicode + string_types = (str, unicode) + integer_types = (int, long) + + iterkeys = lambda d, *args, **kwargs: d.iterkeys(*args, **kwargs) + itervalues = lambda d, *args, **kwargs: d.itervalues(*args, **kwargs) + iteritems = lambda d, *args, **kwargs: d.iteritems(*args, 
**kwargs) + + iterlists = lambda d, *args, **kwargs: d.iterlists(*args, **kwargs) + iterlistvalues = lambda d, *args, **kwargs: d.iterlistvalues(*args, **kwargs) + + int_to_byte = chr + iter_bytes = iter + + exec('def reraise(tp, value, tb=None):\n raise tp, value, tb') + + def fix_tuple_repr(obj): + def __repr__(self): + cls = self.__class__ + return '%s(%s)' % (cls.__name__, ', '.join( + '%s=%r' % (field, self[index]) + for index, field in enumerate(cls._fields) + )) + obj.__repr__ = __repr__ + return obj + + def implements_iterator(cls): + cls.next = cls.__next__ + del cls.__next__ + return cls + + def implements_to_string(cls): + cls.__unicode__ = cls.__str__ + cls.__str__ = lambda x: x.__unicode__().encode('utf-8') + return cls + + def native_string_result(func): + def wrapper(*args, **kwargs): + return func(*args, **kwargs).encode('utf-8') + return functools.update_wrapper(wrapper, func) + + def implements_bool(cls): + cls.__nonzero__ = cls.__bool__ + del cls.__bool__ + return cls + + from itertools import imap, izip, ifilter + range_type = xrange + + from StringIO import StringIO + from cStringIO import StringIO as BytesIO + NativeStringIO = BytesIO + + def make_literal_wrapper(reference): + return _identity + + def normalize_string_tuple(tup): + """Normalizes a string tuple to a common type. Following Python 2 + rules, upgrades to unicode are implicit. + """ + if any(isinstance(x, text_type) for x in tup): + return tuple(to_unicode(x) for x in tup) + return tup + + def try_coerce_native(s): + """Try to coerce a unicode string to native if possible. Otherwise, + leave it as unicode. + """ + try: + return to_native(s) + except UnicodeError: + return s + + wsgi_get_bytes = _identity + + def wsgi_decoding_dance(s, charset='utf-8', errors='replace'): + return s.decode(charset, errors) + + def wsgi_encoding_dance(s, charset='utf-8', errors='replace'): + if isinstance(s, bytes): + return s + return s.encode(charset, errors) + + def to_bytes(x, charset=sys.getdefaultencoding(), errors='strict'): + if x is None: + return None + if isinstance(x, (bytes, bytearray, buffer)): + return bytes(x) + if isinstance(x, unicode): + return x.encode(charset, errors) + raise TypeError('Expected bytes') + + def to_native(x, charset=sys.getdefaultencoding(), errors='strict'): + if x is None or isinstance(x, str): + return x + return x.encode(charset, errors) + +else: + unichr = chr + text_type = str + string_types = (str, ) + integer_types = (int, ) + + iterkeys = lambda d, *args, **kwargs: iter(d.keys(*args, **kwargs)) + itervalues = lambda d, *args, **kwargs: iter(d.values(*args, **kwargs)) + iteritems = lambda d, *args, **kwargs: iter(d.items(*args, **kwargs)) + + iterlists = lambda d, *args, **kwargs: iter(d.lists(*args, **kwargs)) + iterlistvalues = lambda d, *args, **kwargs: iter(d.listvalues(*args, **kwargs)) + + int_to_byte = operator.methodcaller('to_bytes', 1, 'big') + iter_bytes = functools.partial(map, int_to_byte) + + def reraise(tp, value, tb=None): + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + + fix_tuple_repr = _identity + implements_iterator = _identity + implements_to_string = _identity + implements_bool = _identity + native_string_result = _identity + imap = map + izip = zip + ifilter = filter + range_type = range + + from io import StringIO, BytesIO + NativeStringIO = StringIO + + _latin1_encode = operator.methodcaller('encode', 'latin1') + + def make_literal_wrapper(reference): + if isinstance(reference, text_type): + return _identity + return 
_latin1_encode + + def normalize_string_tuple(tup): + """Ensures that all types in the tuple are either strings + or bytes. + """ + tupiter = iter(tup) + is_text = isinstance(next(tupiter, None), text_type) + for arg in tupiter: + if isinstance(arg, text_type) != is_text: + raise TypeError('Cannot mix str and bytes arguments (got %s)' + % repr(tup)) + return tup + + try_coerce_native = _identity + wsgi_get_bytes = _latin1_encode + + def wsgi_decoding_dance(s, charset='utf-8', errors='replace'): + return s.encode('latin1').decode(charset, errors) + + def wsgi_encoding_dance(s, charset='utf-8', errors='replace'): + if isinstance(s, text_type): + s = s.encode(charset) + return s.decode('latin1', errors) + + def to_bytes(x, charset=sys.getdefaultencoding(), errors='strict'): + if x is None: + return None + if isinstance(x, (bytes, bytearray, memoryview)): # noqa + return bytes(x) + if isinstance(x, str): + return x.encode(charset, errors) + raise TypeError('Expected bytes') + + def to_native(x, charset=sys.getdefaultencoding(), errors='strict'): + if x is None or isinstance(x, str): + return x + return x.decode(charset, errors) + + +def to_unicode(x, charset=sys.getdefaultencoding(), errors='strict', + allow_none_charset=False): + if x is None: + return None + if not isinstance(x, bytes): + return text_type(x) + if charset is None and allow_none_charset: + return x + return x.decode(charset, errors) diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/_internal.py b/flask/venv/lib/python3.6/site-packages/werkzeug/_internal.py new file mode 100644 index 0000000..28bfd9f --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/werkzeug/_internal.py @@ -0,0 +1,419 @@ +# -*- coding: utf-8 -*- +""" + werkzeug._internal + ~~~~~~~~~~~~~~~~~~ + + This module provides internally used helpers and constants. + + :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. + :license: BSD, see LICENSE for more details. +""" +import re +import string +import inspect +from weakref import WeakKeyDictionary +from datetime import datetime, date +from itertools import chain + +from werkzeug._compat import iter_bytes, text_type, BytesIO, int_to_byte, \ + range_type, integer_types + + +_logger = None +_empty_stream = BytesIO() +_signature_cache = WeakKeyDictionary() +_epoch_ord = date(1970, 1, 1).toordinal() +_cookie_params = set((b'expires', b'path', b'comment', + b'max-age', b'secure', b'httponly', + b'version')) +_legal_cookie_chars = (string.ascii_letters + + string.digits + + u"/=!#$%&'*+-.^_`|~:").encode('ascii') + +_cookie_quoting_map = { + b',': b'\\054', + b';': b'\\073', + b'"': b'\\"', + b'\\': b'\\\\', +} +for _i in chain(range_type(32), range_type(127, 256)): + _cookie_quoting_map[int_to_byte(_i)] = ('\\%03o' % _i).encode('latin1') + + +_octal_re = re.compile(br'\\[0-3][0-7][0-7]') +_quote_re = re.compile(br'[\\].') +_legal_cookie_chars_re = br'[\w\d!#%&\'~_`><@,:/\$\*\+\-\.\^\|\)\(\?\}\{\=]' +_cookie_re = re.compile(br""" + (?P[^=;]+) + (?:\s*=\s* + (?P + "(?:[^\\"]|\\.)*" | + (?:.*?) + ) + )? 
+ \s*; +""", flags=re.VERBOSE) + + +class _Missing(object): + + def __repr__(self): + return 'no value' + + def __reduce__(self): + return '_missing' + +_missing = _Missing() + + +def _get_environ(obj): + env = getattr(obj, 'environ', obj) + assert isinstance(env, dict), \ + '%r is not a WSGI environment (has to be a dict)' % type(obj).__name__ + return env + + +def _log(type, message, *args, **kwargs): + """Log into the internal werkzeug logger.""" + global _logger + if _logger is None: + import logging + _logger = logging.getLogger('werkzeug') + # Only set up a default log handler if the + # end-user application didn't set anything up. + if not logging.root.handlers and _logger.level == logging.NOTSET: + _logger.setLevel(logging.INFO) + handler = logging.StreamHandler() + _logger.addHandler(handler) + getattr(_logger, type)(message.rstrip(), *args, **kwargs) + + +def _parse_signature(func): + """Return a signature object for the function.""" + if hasattr(func, 'im_func'): + func = func.im_func + + # if we have a cached validator for this function, return it + parse = _signature_cache.get(func) + if parse is not None: + return parse + + # inspect the function signature and collect all the information + if hasattr(inspect, 'getfullargspec'): + tup = inspect.getfullargspec(func) + else: + tup = inspect.getargspec(func) + positional, vararg_var, kwarg_var, defaults = tup[:4] + defaults = defaults or () + arg_count = len(positional) + arguments = [] + for idx, name in enumerate(positional): + if isinstance(name, list): + raise TypeError('cannot parse functions that unpack tuples ' + 'in the function signature') + try: + default = defaults[idx - arg_count] + except IndexError: + param = (name, False, None) + else: + param = (name, True, default) + arguments.append(param) + arguments = tuple(arguments) + + def parse(args, kwargs): + new_args = [] + missing = [] + extra = {} + + # consume as many arguments as positional as possible + for idx, (name, has_default, default) in enumerate(arguments): + try: + new_args.append(args[idx]) + except IndexError: + try: + new_args.append(kwargs.pop(name)) + except KeyError: + if has_default: + new_args.append(default) + else: + missing.append(name) + else: + if name in kwargs: + extra[name] = kwargs.pop(name) + + # handle extra arguments + extra_positional = args[arg_count:] + if vararg_var is not None: + new_args.extend(extra_positional) + extra_positional = () + if kwargs and kwarg_var is None: + extra.update(kwargs) + kwargs = {} + + return new_args, kwargs, missing, extra, extra_positional, \ + arguments, vararg_var, kwarg_var + _signature_cache[func] = parse + return parse + + +def _date_to_unix(arg): + """Converts a timetuple, integer or datetime object into the seconds from + epoch in utc. 
+ """ + if isinstance(arg, datetime): + arg = arg.utctimetuple() + elif isinstance(arg, integer_types + (float,)): + return int(arg) + year, month, day, hour, minute, second = arg[:6] + days = date(year, month, 1).toordinal() - _epoch_ord + day - 1 + hours = days * 24 + hour + minutes = hours * 60 + minute + seconds = minutes * 60 + second + return seconds + + +class _DictAccessorProperty(object): + + """Baseclass for `environ_property` and `header_property`.""" + read_only = False + + def __init__(self, name, default=None, load_func=None, dump_func=None, + read_only=None, doc=None): + self.name = name + self.default = default + self.load_func = load_func + self.dump_func = dump_func + if read_only is not None: + self.read_only = read_only + self.__doc__ = doc + + def __get__(self, obj, type=None): + if obj is None: + return self + storage = self.lookup(obj) + if self.name not in storage: + return self.default + rv = storage[self.name] + if self.load_func is not None: + try: + rv = self.load_func(rv) + except (ValueError, TypeError): + rv = self.default + return rv + + def __set__(self, obj, value): + if self.read_only: + raise AttributeError('read only property') + if self.dump_func is not None: + value = self.dump_func(value) + self.lookup(obj)[self.name] = value + + def __delete__(self, obj): + if self.read_only: + raise AttributeError('read only property') + self.lookup(obj).pop(self.name, None) + + def __repr__(self): + return '<%s %s>' % ( + self.__class__.__name__, + self.name + ) + + +def _cookie_quote(b): + buf = bytearray() + all_legal = True + _lookup = _cookie_quoting_map.get + _push = buf.extend + + for char in iter_bytes(b): + if char not in _legal_cookie_chars: + all_legal = False + char = _lookup(char, char) + _push(char) + + if all_legal: + return bytes(buf) + return bytes(b'"' + buf + b'"') + + +def _cookie_unquote(b): + if len(b) < 2: + return b + if b[:1] != b'"' or b[-1:] != b'"': + return b + + b = b[1:-1] + + i = 0 + n = len(b) + rv = bytearray() + _push = rv.extend + + while 0 <= i < n: + o_match = _octal_re.search(b, i) + q_match = _quote_re.search(b, i) + if not o_match and not q_match: + rv.extend(b[i:]) + break + j = k = -1 + if o_match: + j = o_match.start(0) + if q_match: + k = q_match.start(0) + if q_match and (not o_match or k < j): + _push(b[i:k]) + _push(b[k + 1:k + 2]) + i = k + 2 + else: + _push(b[i:j]) + rv.append(int(b[j + 1:j + 4], 8)) + i = j + 4 + + return bytes(rv) + + +def _cookie_parse_impl(b): + """Lowlevel cookie parsing facility that operates on bytes.""" + i = 0 + n = len(b) + + while i < n: + match = _cookie_re.search(b + b';', i) + if not match: + break + + key = match.group('key').strip() + value = match.group('val') or b'' + i = match.end(0) + + # Ignore parameters. We have no interest in them. + if key.lower() not in _cookie_params: + yield _cookie_unquote(key), _cookie_unquote(value) + + +def _encode_idna(domain): + # If we're given bytes, make sure they fit into ASCII + if not isinstance(domain, text_type): + domain.decode('ascii') + return domain + + # Otherwise check if it's already ascii, then return + try: + return domain.encode('ascii') + except UnicodeError: + pass + + # Otherwise encode each part separately + parts = domain.split('.') + for idx, part in enumerate(parts): + parts[idx] = part.encode('idna') + return b'.'.join(parts) + + +def _decode_idna(domain): + # If the input is a string try to encode it to ascii to + # do the idna decoding. 
if that fails because of an + # unicode error, then we already have a decoded idna domain + if isinstance(domain, text_type): + try: + domain = domain.encode('ascii') + except UnicodeError: + return domain + + # Decode each part separately. If a part fails, try to + # decode it with ascii and silently ignore errors. This makes + # most sense because the idna codec does not have error handling + parts = domain.split(b'.') + for idx, part in enumerate(parts): + try: + parts[idx] = part.decode('idna') + except UnicodeError: + parts[idx] = part.decode('ascii', 'ignore') + + return '.'.join(parts) + + +def _make_cookie_domain(domain): + if domain is None: + return None + domain = _encode_idna(domain) + if b':' in domain: + domain = domain.split(b':', 1)[0] + if b'.' in domain: + return domain + raise ValueError( + 'Setting \'domain\' for a cookie on a server running locally (ex: ' + 'localhost) is not supported by complying browsers. You should ' + 'have something like: \'127.0.0.1 localhost dev.localhost\' on ' + 'your hosts file and then point your server to run on ' + '\'dev.localhost\' and also set \'domain\' for \'dev.localhost\'' + ) + + +def _easteregg(app=None): + """Like the name says. But who knows how it works?""" + def bzzzzzzz(gyver): + import base64 + import zlib + return zlib.decompress(base64.b64decode(gyver)).decode('ascii') + gyver = u'\n'.join([x + (77 - len(x)) * u' ' for x in bzzzzzzz(b''' +eJyFlzuOJDkMRP06xRjymKgDJCDQStBYT8BCgK4gTwfQ2fcFs2a2FzvZk+hvlcRvRJD148efHt9m +9Xz94dRY5hGt1nrYcXx7us9qlcP9HHNh28rz8dZj+q4rynVFFPdlY4zH873NKCexrDM6zxxRymzz +4QIxzK4bth1PV7+uHn6WXZ5C4ka/+prFzx3zWLMHAVZb8RRUxtFXI5DTQ2n3Hi2sNI+HK43AOWSY +jmEzE4naFp58PdzhPMdslLVWHTGUVpSxImw+pS/D+JhzLfdS1j7PzUMxij+mc2U0I9zcbZ/HcZxc +q1QjvvcThMYFnp93agEx392ZdLJWXbi/Ca4Oivl4h/Y1ErEqP+lrg7Xa4qnUKu5UE9UUA4xeqLJ5 +jWlPKJvR2yhRI7xFPdzPuc6adXu6ovwXwRPXXnZHxlPtkSkqWHilsOrGrvcVWXgGP3daXomCj317 +8P2UOw/NnA0OOikZyFf3zZ76eN9QXNwYdD8f8/LdBRFg0BO3bB+Pe/+G8er8tDJv83XTkj7WeMBJ +v/rnAfdO51d6sFglfi8U7zbnr0u9tyJHhFZNXYfH8Iafv2Oa+DT6l8u9UYlajV/hcEgk1x8E8L/r +XJXl2SK+GJCxtnyhVKv6GFCEB1OO3f9YWAIEbwcRWv/6RPpsEzOkXURMN37J0PoCSYeBnJQd9Giu +LxYQJNlYPSo/iTQwgaihbART7Fcyem2tTSCcwNCs85MOOpJtXhXDe0E7zgZJkcxWTar/zEjdIVCk +iXy87FW6j5aGZhttDBoAZ3vnmlkx4q4mMmCdLtnHkBXFMCReqthSGkQ+MDXLLCpXwBs0t+sIhsDI +tjBB8MwqYQpLygZ56rRHHpw+OAVyGgaGRHWy2QfXez+ZQQTTBkmRXdV/A9LwH6XGZpEAZU8rs4pE +1R4FQ3Uwt8RKEtRc0/CrANUoes3EzM6WYcFyskGZ6UTHJWenBDS7h163Eo2bpzqxNE9aVgEM2CqI +GAJe9Yra4P5qKmta27VjzYdR04Vc7KHeY4vs61C0nbywFmcSXYjzBHdiEjraS7PGG2jHHTpJUMxN +Jlxr3pUuFvlBWLJGE3GcA1/1xxLcHmlO+LAXbhrXah1tD6Ze+uqFGdZa5FM+3eHcKNaEarutAQ0A +QMAZHV+ve6LxAwWnXbbSXEG2DmCX5ijeLCKj5lhVFBrMm+ryOttCAeFpUdZyQLAQkA06RLs56rzG +8MID55vqr/g64Qr/wqwlE0TVxgoiZhHrbY2h1iuuyUVg1nlkpDrQ7Vm1xIkI5XRKLedN9EjzVchu +jQhXcVkjVdgP2O99QShpdvXWoSwkp5uMwyjt3jiWCqWGSiaaPAzohjPanXVLbM3x0dNskJsaCEyz +DTKIs+7WKJD4ZcJGfMhLFBf6hlbnNkLEePF8Cx2o2kwmYF4+MzAxa6i+6xIQkswOqGO+3x9NaZX8 +MrZRaFZpLeVTYI9F/djY6DDVVs340nZGmwrDqTCiiqD5luj3OzwpmQCiQhdRYowUYEA3i1WWGwL4 +GCtSoO4XbIPFeKGU13XPkDf5IdimLpAvi2kVDVQbzOOa4KAXMFlpi/hV8F6IDe0Y2reg3PuNKT3i +RYhZqtkQZqSB2Qm0SGtjAw7RDwaM1roESC8HWiPxkoOy0lLTRFG39kvbLZbU9gFKFRvixDZBJmpi +Xyq3RE5lW00EJjaqwp/v3EByMSpVZYsEIJ4APaHmVtpGSieV5CALOtNUAzTBiw81GLgC0quyzf6c +NlWknzJeCsJ5fup2R4d8CYGN77mu5vnO1UqbfElZ9E6cR6zbHjgsr9ly18fXjZoPeDjPuzlWbFwS +pdvPkhntFvkc13qb9094LL5NrA3NIq3r9eNnop9DizWOqCEbyRBFJTHn6Tt3CG1o8a4HevYh0XiJ +sR0AVVHuGuMOIfbuQ/OKBkGRC6NJ4u7sbPX8bG/n5sNIOQ6/Y/BX3IwRlTSabtZpYLB85lYtkkgm +p1qXK3Du2mnr5INXmT/78KI12n11EFBkJHHp0wJyLe9MvPNUGYsf+170maayRoy2lURGHAIapSpQ 
+krEDuNoJCHNlZYhKpvw4mspVWxqo415n8cD62N9+EfHrAvqQnINStetek7RY2Urv8nxsnGaZfRr/ +nhXbJ6m/yl1LzYqscDZA9QHLNbdaSTTr+kFg3bC0iYbX/eQy0Bv3h4B50/SGYzKAXkCeOLI3bcAt +mj2Z/FM1vQWgDynsRwNvrWnJHlespkrp8+vO1jNaibm+PhqXPPv30YwDZ6jApe3wUjFQobghvW9p +7f2zLkGNv8b191cD/3vs9Q833z8t''').splitlines()]) + + def easteregged(environ, start_response): + def injecting_start_response(status, headers, exc_info=None): + headers.append(('X-Powered-By', 'Werkzeug')) + return start_response(status, headers, exc_info) + if app is not None and environ.get('QUERY_STRING') != 'macgybarchakku': + return app(environ, injecting_start_response) + injecting_start_response('200 OK', [('Content-Type', 'text/html')]) + return [(u''' + + + +About Werkzeug + + + +

+<h1><a href="http://werkzeug.pocoo.org/">Werkzeug</a></h1>
+<p>the Swiss Army knife of Python web development.</p>
+<pre>%s\n\n\n</pre>
+ +''' % gyver).encode('latin1')] + return easteregged diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/_reloader.py b/flask/venv/lib/python3.6/site-packages/werkzeug/_reloader.py new file mode 100644 index 0000000..0d23dba --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/werkzeug/_reloader.py @@ -0,0 +1,277 @@ +import os +import sys +import time +import subprocess +import threading +from itertools import chain + +from werkzeug._internal import _log +from werkzeug._compat import PY2, iteritems, text_type + + +def _iter_module_files(): + """This iterates over all relevant Python files. It goes through all + loaded files from modules, all files in folders of already loaded modules + as well as all files reachable through a package. + """ + # The list call is necessary on Python 3 in case the module + # dictionary modifies during iteration. + for module in list(sys.modules.values()): + if module is None: + continue + filename = getattr(module, '__file__', None) + if filename: + if os.path.isdir(filename) and \ + os.path.exists(os.path.join(filename, "__init__.py")): + filename = os.path.join(filename, "__init__.py") + + old = None + while not os.path.isfile(filename): + old = filename + filename = os.path.dirname(filename) + if filename == old: + break + else: + if filename[-4:] in ('.pyc', '.pyo'): + filename = filename[:-1] + yield filename + + +def _find_observable_paths(extra_files=None): + """Finds all paths that should be observed.""" + rv = set(os.path.dirname(os.path.abspath(x)) + if os.path.isfile(x) else os.path.abspath(x) + for x in sys.path) + + for filename in extra_files or (): + rv.add(os.path.dirname(os.path.abspath(filename))) + + for module in list(sys.modules.values()): + fn = getattr(module, '__file__', None) + if fn is None: + continue + fn = os.path.abspath(fn) + rv.add(os.path.dirname(fn)) + + return _find_common_roots(rv) + + +def _get_args_for_reloading(): + """Returns the executable. This contains a workaround for windows + if the executable is incorrectly reported to not have the .exe + extension which can cause bugs on reloading. + """ + rv = [sys.executable] + py_script = sys.argv[0] + if os.name == 'nt' and not os.path.exists(py_script) and \ + os.path.exists(py_script + '.exe'): + py_script += '.exe' + if os.path.splitext(rv[0])[1] == '.exe' and os.path.splitext(py_script)[1] == '.exe': + rv.pop(0) + rv.append(py_script) + rv.extend(sys.argv[1:]) + return rv + + +def _find_common_roots(paths): + """Out of some paths it finds the common roots that need monitoring.""" + paths = [x.split(os.path.sep) for x in paths] + root = {} + for chunks in sorted(paths, key=len, reverse=True): + node = root + for chunk in chunks: + node = node.setdefault(chunk, {}) + node.clear() + + rv = set() + + def _walk(node, path): + for prefix, child in iteritems(node): + _walk(child, path + (prefix,)) + if not node: + rv.add('/'.join(path)) + _walk(root, ()) + return rv + + +class ReloaderLoop(object): + name = None + + # monkeypatched by testsuite. wrapping with `staticmethod` is required in + # case time.sleep has been replaced by a non-c function (e.g. by + # `eventlet.monkey_patch`) before we get here + _sleep = staticmethod(time.sleep) + + def __init__(self, extra_files=None, interval=1): + self.extra_files = set(os.path.abspath(x) + for x in extra_files or ()) + self.interval = interval + + def run(self): + pass + + def restart_with_reloader(self): + """Spawn a new Python interpreter with the same arguments as this one, + but running the reloader thread. 
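# A usage sketch of the restart loop described above: it is normally driven
# through run_with_reloader(), defined at the end of this module. The host,
# port and watched file names below are placeholder assumptions.
from werkzeug._reloader import run_with_reloader
from werkzeug.serving import make_server

def _hello(environ, start_response):
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return [b'hello']

def _main():
    # Runs in a daemon thread inside the child process (WERKZEUG_RUN_MAIN=true);
    # the parent keeps respawning the child whenever it exits with code 3,
    # which is how trigger_reload() signals a detected file change.
    make_server('127.0.0.1', 5000, _hello).serve_forever()

run_with_reloader(_main, extra_files=['settings.cfg'], interval=1)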
+ """ + while 1: + _log('info', ' * Restarting with %s' % self.name) + args = _get_args_for_reloading() + new_environ = os.environ.copy() + new_environ['WERKZEUG_RUN_MAIN'] = 'true' + + # a weird bug on windows. sometimes unicode strings end up in the + # environment and subprocess.call does not like this, encode them + # to latin1 and continue. + if os.name == 'nt' and PY2: + for key, value in iteritems(new_environ): + if isinstance(value, text_type): + new_environ[key] = value.encode('iso-8859-1') + + exit_code = subprocess.call(args, env=new_environ, + close_fds=False) + if exit_code != 3: + return exit_code + + def trigger_reload(self, filename): + self.log_reload(filename) + sys.exit(3) + + def log_reload(self, filename): + filename = os.path.abspath(filename) + _log('info', ' * Detected change in %r, reloading' % filename) + + +class StatReloaderLoop(ReloaderLoop): + name = 'stat' + + def run(self): + mtimes = {} + while 1: + for filename in chain(_iter_module_files(), + self.extra_files): + try: + mtime = os.stat(filename).st_mtime + except OSError: + continue + + old_time = mtimes.get(filename) + if old_time is None: + mtimes[filename] = mtime + continue + elif mtime > old_time: + self.trigger_reload(filename) + self._sleep(self.interval) + + +class WatchdogReloaderLoop(ReloaderLoop): + + def __init__(self, *args, **kwargs): + ReloaderLoop.__init__(self, *args, **kwargs) + from watchdog.observers import Observer + from watchdog.events import FileSystemEventHandler + self.observable_paths = set() + + def _check_modification(filename): + if filename in self.extra_files: + self.trigger_reload(filename) + dirname = os.path.dirname(filename) + if dirname.startswith(tuple(self.observable_paths)): + if filename.endswith(('.pyc', '.pyo', '.py')): + self.trigger_reload(filename) + + class _CustomHandler(FileSystemEventHandler): + + def on_created(self, event): + _check_modification(event.src_path) + + def on_modified(self, event): + _check_modification(event.src_path) + + def on_moved(self, event): + _check_modification(event.src_path) + _check_modification(event.dest_path) + + def on_deleted(self, event): + _check_modification(event.src_path) + + reloader_name = Observer.__name__.lower() + if reloader_name.endswith('observer'): + reloader_name = reloader_name[:-8] + reloader_name += ' reloader' + + self.name = reloader_name + + self.observer_class = Observer + self.event_handler = _CustomHandler() + self.should_reload = False + + def trigger_reload(self, filename): + # This is called inside an event handler, which means throwing + # SystemExit has no effect. + # https://github.com/gorakhargosh/watchdog/issues/294 + self.should_reload = True + self.log_reload(filename) + + def run(self): + watches = {} + observer = self.observer_class() + observer.start() + + try: + while not self.should_reload: + to_delete = set(watches) + paths = _find_observable_paths(self.extra_files) + for path in paths: + if path not in watches: + try: + watches[path] = observer.schedule( + self.event_handler, path, recursive=True) + except OSError: + # Clear this path from list of watches We don't want + # the same error message showing again in the next + # iteration. 
+ watches[path] = None + to_delete.discard(path) + for path in to_delete: + watch = watches.pop(path, None) + if watch is not None: + observer.unschedule(watch) + self.observable_paths = paths + self._sleep(self.interval) + finally: + observer.stop() + observer.join() + + sys.exit(3) + + +reloader_loops = { + 'stat': StatReloaderLoop, + 'watchdog': WatchdogReloaderLoop, +} + +try: + __import__('watchdog.observers') +except ImportError: + reloader_loops['auto'] = reloader_loops['stat'] +else: + reloader_loops['auto'] = reloader_loops['watchdog'] + + +def run_with_reloader(main_func, extra_files=None, interval=1, + reloader_type='auto'): + """Run the given function in an independent python interpreter.""" + import signal + reloader = reloader_loops[reloader_type](extra_files, interval) + signal.signal(signal.SIGTERM, lambda *args: sys.exit(0)) + try: + if os.environ.get('WERKZEUG_RUN_MAIN') == 'true': + t = threading.Thread(target=main_func, args=()) + t.setDaemon(True) + t.start() + reloader.run() + else: + sys.exit(reloader.restart_with_reloader()) + except KeyboardInterrupt: + pass diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/__init__.py b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/__init__.py new file mode 100644 index 0000000..3e572eb --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/__init__.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +""" + werkzeug.contrib + ~~~~~~~~~~~~~~~~ + + Contains user-submitted code that other users may find useful, but which + is not part of the Werkzeug core. Anyone can write code for inclusion in + the `contrib` package. All modules in this package are distributed as an + add-on library and thus are not part of Werkzeug itself. + + This file itself is mostly for informational purposes and to tell the + Python interpreter that `contrib` is a package. + + :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. + :license: BSD, see LICENSE for more details. 
+""" diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/__pycache__/__init__.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..f736401 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/__pycache__/__init__.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/__pycache__/atom.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/__pycache__/atom.cpython-36.pyc new file mode 100644 index 0000000..49ab1dc Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/__pycache__/atom.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/__pycache__/cache.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/__pycache__/cache.cpython-36.pyc new file mode 100644 index 0000000..8571f8b Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/__pycache__/cache.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/__pycache__/fixers.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/__pycache__/fixers.cpython-36.pyc new file mode 100644 index 0000000..715cb64 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/__pycache__/fixers.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/__pycache__/iterio.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/__pycache__/iterio.cpython-36.pyc new file mode 100644 index 0000000..724f067 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/__pycache__/iterio.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/__pycache__/jsrouting.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/__pycache__/jsrouting.cpython-36.pyc new file mode 100644 index 0000000..3ba729d Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/__pycache__/jsrouting.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/__pycache__/limiter.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/__pycache__/limiter.cpython-36.pyc new file mode 100644 index 0000000..c912c4e Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/__pycache__/limiter.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/__pycache__/lint.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/__pycache__/lint.cpython-36.pyc new file mode 100644 index 0000000..11e7888 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/__pycache__/lint.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/__pycache__/profiler.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/__pycache__/profiler.cpython-36.pyc new file mode 100644 index 0000000..461eab1 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/__pycache__/profiler.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/__pycache__/securecookie.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/__pycache__/securecookie.cpython-36.pyc new file mode 100644 index 0000000..b219a75 
Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/__pycache__/securecookie.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/__pycache__/sessions.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/__pycache__/sessions.cpython-36.pyc new file mode 100644 index 0000000..9d2ca6a Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/__pycache__/sessions.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/__pycache__/testtools.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/__pycache__/testtools.cpython-36.pyc new file mode 100644 index 0000000..3bcaa94 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/__pycache__/testtools.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/__pycache__/wrappers.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/__pycache__/wrappers.cpython-36.pyc new file mode 100644 index 0000000..f7d64ee Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/__pycache__/wrappers.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/atom.py b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/atom.py new file mode 100644 index 0000000..51d1a4e --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/atom.py @@ -0,0 +1,355 @@ +# -*- coding: utf-8 -*- +""" + werkzeug.contrib.atom + ~~~~~~~~~~~~~~~~~~~~~ + + This module provides a class called :class:`AtomFeed` which can be + used to generate feeds in the Atom syndication format (see :rfc:`4287`). + + Example:: + + def atom_feed(request): + feed = AtomFeed("My Blog", feed_url=request.url, + url=request.host_url, + subtitle="My example blog for a feed test.") + for post in Post.query.limit(10).all(): + feed.add(post.title, post.body, content_type='html', + author=post.author, url=post.url, id=post.uid, + updated=post.last_update, published=post.pub_date) + return feed.get_response() + + :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. + :license: BSD, see LICENSE for more details. +""" +from datetime import datetime + +from werkzeug.utils import escape +from werkzeug.wrappers import BaseResponse +from werkzeug._compat import implements_to_string, string_types + + +XHTML_NAMESPACE = 'http://www.w3.org/1999/xhtml' + + +def _make_text_block(name, content, content_type=None): + """Helper function for the builder that creates an XML text block.""" + if content_type == 'xhtml': + return u'<%s type="xhtml">
<div xmlns="%s">%s</div></%s>
\n' % \ + (name, XHTML_NAMESPACE, content, name) + if not content_type: + return u'<%s>%s\n' % (name, escape(content), name) + return u'<%s type="%s">%s\n' % (name, content_type, + escape(content), name) + + +def format_iso8601(obj): + """Format a datetime object for iso8601""" + iso8601 = obj.isoformat() + if obj.tzinfo: + return iso8601 + return iso8601 + 'Z' + + +@implements_to_string +class AtomFeed(object): + + """A helper class that creates Atom feeds. + + :param title: the title of the feed. Required. + :param title_type: the type attribute for the title element. One of + ``'html'``, ``'text'`` or ``'xhtml'``. + :param url: the url for the feed (not the url *of* the feed) + :param id: a globally unique id for the feed. Must be an URI. If + not present the `feed_url` is used, but one of both is + required. + :param updated: the time the feed was modified the last time. Must + be a :class:`datetime.datetime` object. If not + present the latest entry's `updated` is used. + Treated as UTC if naive datetime. + :param feed_url: the URL to the feed. Should be the URL that was + requested. + :param author: the author of the feed. Must be either a string (the + name) or a dict with name (required) and uri or + email (both optional). Can be a list of (may be + mixed, too) strings and dicts, too, if there are + multiple authors. Required if not every entry has an + author element. + :param icon: an icon for the feed. + :param logo: a logo for the feed. + :param rights: copyright information for the feed. + :param rights_type: the type attribute for the rights element. One of + ``'html'``, ``'text'`` or ``'xhtml'``. Default is + ``'text'``. + :param subtitle: a short description of the feed. + :param subtitle_type: the type attribute for the subtitle element. + One of ``'text'``, ``'html'``, ``'text'`` + or ``'xhtml'``. Default is ``'text'``. + :param links: additional links. Must be a list of dictionaries with + href (required) and rel, type, hreflang, title, length + (all optional) + :param generator: the software that generated this feed. This must be + a tuple in the form ``(name, url, version)``. If + you don't want to specify one of them, set the item + to `None`. + :param entries: a list with the entries for the feed. Entries can also + be added later with :meth:`add`. + + For more information on the elements see + http://www.atomenabled.org/developers/syndication/ + + Everywhere where a list is demanded, any iterable can be used. 
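# A usage sketch of the constructor arguments documented above; the titles,
# URLs and dates are placeholder assumptions, not taken from this repository.
from datetime import datetime
from werkzeug.contrib.atom import AtomFeed

feed = AtomFeed('Example Blog',
                feed_url='http://example.com/feed.atom',
                url='http://example.com/',
                subtitle='Latest posts',
                author={'name': 'Jane Doe', 'email': 'jane@example.com'})
feed.add('First post', 'Hello <b>world</b>', content_type='html',
         url='http://example.com/posts/1',
         updated=datetime(2018, 1, 1, 12, 0))
atom_xml = feed.to_string()      # the full Atom document as a unicode string
response = feed.get_response()   # or a ready-made WSGI response object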
+ """ + + default_generator = ('Werkzeug', None, None) + + def __init__(self, title=None, entries=None, **kwargs): + self.title = title + self.title_type = kwargs.get('title_type', 'text') + self.url = kwargs.get('url') + self.feed_url = kwargs.get('feed_url', self.url) + self.id = kwargs.get('id', self.feed_url) + self.updated = kwargs.get('updated') + self.author = kwargs.get('author', ()) + self.icon = kwargs.get('icon') + self.logo = kwargs.get('logo') + self.rights = kwargs.get('rights') + self.rights_type = kwargs.get('rights_type') + self.subtitle = kwargs.get('subtitle') + self.subtitle_type = kwargs.get('subtitle_type', 'text') + self.generator = kwargs.get('generator') + if self.generator is None: + self.generator = self.default_generator + self.links = kwargs.get('links', []) + self.entries = entries and list(entries) or [] + + if not hasattr(self.author, '__iter__') \ + or isinstance(self.author, string_types + (dict,)): + self.author = [self.author] + for i, author in enumerate(self.author): + if not isinstance(author, dict): + self.author[i] = {'name': author} + + if not self.title: + raise ValueError('title is required') + if not self.id: + raise ValueError('id is required') + for author in self.author: + if 'name' not in author: + raise TypeError('author must contain at least a name') + + def add(self, *args, **kwargs): + """Add a new entry to the feed. This function can either be called + with a :class:`FeedEntry` or some keyword and positional arguments + that are forwarded to the :class:`FeedEntry` constructor. + """ + if len(args) == 1 and not kwargs and isinstance(args[0], FeedEntry): + self.entries.append(args[0]) + else: + kwargs['feed_url'] = self.feed_url + self.entries.append(FeedEntry(*args, **kwargs)) + + def __repr__(self): + return '<%s %r (%d entries)>' % ( + self.__class__.__name__, + self.title, + len(self.entries) + ) + + def generate(self): + """Return a generator that yields pieces of XML.""" + # atom demands either an author element in every entry or a global one + if not self.author: + if any(not e.author for e in self.entries): + self.author = ({'name': 'Unknown author'},) + + if not self.updated: + dates = sorted([entry.updated for entry in self.entries]) + self.updated = dates and dates[-1] or datetime.utcnow() + + yield u'\n' + yield u'\n' + yield ' ' + _make_text_block('title', self.title, self.title_type) + yield u' %s\n' % escape(self.id) + yield u' %s\n' % format_iso8601(self.updated) + if self.url: + yield u' \n' % escape(self.url) + if self.feed_url: + yield u' \n' % \ + escape(self.feed_url) + for link in self.links: + yield u' \n' % ''.join('%s="%s" ' % + (k, escape(link[k])) for k in link) + for author in self.author: + yield u' \n' + yield u' %s\n' % escape(author['name']) + if 'uri' in author: + yield u' %s\n' % escape(author['uri']) + if 'email' in author: + yield ' %s\n' % escape(author['email']) + yield ' \n' + if self.subtitle: + yield ' ' + _make_text_block('subtitle', self.subtitle, + self.subtitle_type) + if self.icon: + yield u' %s\n' % escape(self.icon) + if self.logo: + yield u' %s\n' % escape(self.logo) + if self.rights: + yield ' ' + _make_text_block('rights', self.rights, + self.rights_type) + generator_name, generator_url, generator_version = self.generator + if generator_name or generator_url or generator_version: + tmp = [u' %s\n' % escape(generator_name)) + yield u''.join(tmp) + for entry in self.entries: + for line in entry.generate(): + yield u' ' + line + yield u'\n' + + def to_string(self): + """Convert the feed into 
a string.""" + return u''.join(self.generate()) + + def get_response(self): + """Return a response object for the feed.""" + return BaseResponse(self.to_string(), mimetype='application/atom+xml') + + def __call__(self, environ, start_response): + """Use the class as WSGI response object.""" + return self.get_response()(environ, start_response) + + def __str__(self): + return self.to_string() + + +@implements_to_string +class FeedEntry(object): + + """Represents a single entry in a feed. + + :param title: the title of the entry. Required. + :param title_type: the type attribute for the title element. One of + ``'html'``, ``'text'`` or ``'xhtml'``. + :param content: the content of the entry. + :param content_type: the type attribute for the content element. One + of ``'html'``, ``'text'`` or ``'xhtml'``. + :param summary: a summary of the entry's content. + :param summary_type: the type attribute for the summary element. One + of ``'html'``, ``'text'`` or ``'xhtml'``. + :param url: the url for the entry. + :param id: a globally unique id for the entry. Must be an URI. If + not present the URL is used, but one of both is required. + :param updated: the time the entry was modified the last time. Must + be a :class:`datetime.datetime` object. Treated as + UTC if naive datetime. Required. + :param author: the author of the entry. Must be either a string (the + name) or a dict with name (required) and uri or + email (both optional). Can be a list of (may be + mixed, too) strings and dicts, too, if there are + multiple authors. Required if the feed does not have an + author element. + :param published: the time the entry was initially published. Must + be a :class:`datetime.datetime` object. Treated as + UTC if naive datetime. + :param rights: copyright information for the entry. + :param rights_type: the type attribute for the rights element. One of + ``'html'``, ``'text'`` or ``'xhtml'``. Default is + ``'text'``. + :param links: additional links. Must be a list of dictionaries with + href (required) and rel, type, hreflang, title, length + (all optional) + :param categories: categories for the entry. Must be a list of dictionaries + with term (required), scheme and label (all optional) + :param xml_base: The xml base (url) for this feed item. If not provided + it will default to the item url. + + For more information on the elements see + http://www.atomenabled.org/developers/syndication/ + + Everywhere where a list is demanded, any iterable can be used. 
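# FeedEntry objects can also be built directly and handed to AtomFeed.add();
# as the docstring above notes, `updated` and an `id` (or `url`) are
# required. Values below are placeholders.
from datetime import datetime
from werkzeug.contrib.atom import FeedEntry

entry = FeedEntry('Second post', 'Plain-text body', content_type='text',
                  url='http://example.com/posts/2',
                  updated=datetime(2018, 2, 1),
                  author={'name': 'Jane Doe'})
# feed.add(entry) appends it unchanged when called with a single FeedEntry
# argument (see AtomFeed.add above).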
+ """ + + def __init__(self, title=None, content=None, feed_url=None, **kwargs): + self.title = title + self.title_type = kwargs.get('title_type', 'text') + self.content = content + self.content_type = kwargs.get('content_type', 'html') + self.url = kwargs.get('url') + self.id = kwargs.get('id', self.url) + self.updated = kwargs.get('updated') + self.summary = kwargs.get('summary') + self.summary_type = kwargs.get('summary_type', 'html') + self.author = kwargs.get('author', ()) + self.published = kwargs.get('published') + self.rights = kwargs.get('rights') + self.links = kwargs.get('links', []) + self.categories = kwargs.get('categories', []) + self.xml_base = kwargs.get('xml_base', feed_url) + + if not hasattr(self.author, '__iter__') \ + or isinstance(self.author, string_types + (dict,)): + self.author = [self.author] + for i, author in enumerate(self.author): + if not isinstance(author, dict): + self.author[i] = {'name': author} + + if not self.title: + raise ValueError('title is required') + if not self.id: + raise ValueError('id is required') + if not self.updated: + raise ValueError('updated is required') + + def __repr__(self): + return '<%s %r>' % ( + self.__class__.__name__, + self.title + ) + + def generate(self): + """Yields pieces of ATOM XML.""" + base = '' + if self.xml_base: + base = ' xml:base="%s"' % escape(self.xml_base) + yield u'\n' % base + yield u' ' + _make_text_block('title', self.title, self.title_type) + yield u' %s\n' % escape(self.id) + yield u' %s\n' % format_iso8601(self.updated) + if self.published: + yield u' %s\n' % \ + format_iso8601(self.published) + if self.url: + yield u' \n' % escape(self.url) + for author in self.author: + yield u' \n' + yield u' %s\n' % escape(author['name']) + if 'uri' in author: + yield u' %s\n' % escape(author['uri']) + if 'email' in author: + yield u' %s\n' % escape(author['email']) + yield u' \n' + for link in self.links: + yield u' \n' % ''.join('%s="%s" ' % + (k, escape(link[k])) for k in link) + for category in self.categories: + yield u' \n' % ''.join('%s="%s" ' % + (k, escape(category[k])) for k in category) + if self.summary: + yield u' ' + _make_text_block('summary', self.summary, + self.summary_type) + if self.content: + yield u' ' + _make_text_block('content', self.content, + self.content_type) + yield u'\n' + + def to_string(self): + """Convert the feed item into a unicode object.""" + return u''.join(self.generate()) + + def __str__(self): + return self.to_string() diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/cache.py b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/cache.py new file mode 100644 index 0000000..179ba24 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/cache.py @@ -0,0 +1,913 @@ +# -*- coding: utf-8 -*- +""" + werkzeug.contrib.cache + ~~~~~~~~~~~~~~~~~~~~~~ + + The main problem with dynamic Web sites is, well, they're dynamic. Each + time a user requests a page, the webserver executes a lot of code, queries + the database, renders templates until the visitor gets the page he sees. + + This is a lot more expensive than just loading a file from the file system + and sending it to the visitor. + + For most Web applications, this overhead isn't a big deal but once it + becomes, you will be glad to have a cache system in place. + + How Caching Works + ================= + + Caching is pretty simple. Basically you have a cache object lurking around + somewhere that is connected to a remote cache or the file system or + something else. 
When the request comes in you check if the current page + is already in the cache and if so, you're returning it from the cache. + Otherwise you generate the page and put it into the cache. (Or a fragment + of the page, you don't have to cache the full thing) + + Here is a simple example of how to cache a sidebar for 5 minutes:: + + def get_sidebar(user): + identifier = 'sidebar_for/user%d' % user.id + value = cache.get(identifier) + if value is not None: + return value + value = generate_sidebar_for(user=user) + cache.set(identifier, value, timeout=60 * 5) + return value + + Creating a Cache Object + ======================= + + To create a cache object you just import the cache system of your choice + from the cache module and instantiate it. Then you can start working + with that object: + + >>> from werkzeug.contrib.cache import SimpleCache + >>> c = SimpleCache() + >>> c.set("foo", "value") + >>> c.get("foo") + 'value' + >>> c.get("missing") is None + True + + Please keep in mind that you have to create the cache and put it somewhere + you have access to it (either as a module global you can import or you just + put it into your WSGI application). + + :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. + :license: BSD, see LICENSE for more details. +""" +import os +import re +import errno +import tempfile +import platform +from hashlib import md5 +from time import time +try: + import cPickle as pickle +except ImportError: # pragma: no cover + import pickle + +from werkzeug._compat import iteritems, string_types, text_type, \ + integer_types, to_native +from werkzeug.posixemulation import rename + + +def _items(mappingorseq): + """Wrapper for efficient iteration over mappings represented by dicts + or sequences:: + + >>> for k, v in _items((i, i*i) for i in xrange(5)): + ... assert k*k == v + + >>> for k, v in _items(dict((i, i*i) for i in xrange(5))): + ... assert k*k == v + + """ + if hasattr(mappingorseq, 'items'): + return iteritems(mappingorseq) + return mappingorseq + + +class BaseCache(object): + + """Baseclass for the cache systems. All the cache systems implement this + API or a superset of it. + + :param default_timeout: the default timeout (in seconds) that is used if + no timeout is specified on :meth:`set`. A timeout + of 0 indicates that the cache never expires. + """ + + def __init__(self, default_timeout=300): + self.default_timeout = default_timeout + + def _normalize_timeout(self, timeout): + if timeout is None: + timeout = self.default_timeout + return timeout + + def get(self, key): + """Look up key in the cache and return the value for it. + + :param key: the key to be looked up. + :returns: The value if it exists and is readable, else ``None``. + """ + return None + + def delete(self, key): + """Delete `key` from the cache. + + :param key: the key to delete. + :returns: Whether the key existed and has been deleted. + :rtype: boolean + """ + return True + + def get_many(self, *keys): + """Returns a list of values for the given keys. + For each key an item in the list is created:: + + foo, bar = cache.get_many("foo", "bar") + + Has the same error handling as :meth:`get`. + + :param keys: The function accepts multiple keys as positional + arguments. + """ + return [self.get(k) for k in keys] + + def get_dict(self, *keys): + """Like :meth:`get_many` but return a dict:: + + d = cache.get_dict("foo", "bar") + foo = d["foo"] + bar = d["bar"] + + :param keys: The function accepts multiple keys as positional + arguments. 
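# A small sketch of the bulk getters documented above, using the in-memory
# SimpleCache defined later in this module.
from werkzeug.contrib.cache import SimpleCache

cache = SimpleCache()
cache.set_many({'foo': 1, 'bar': 2}, timeout=300)
foo, bar = cache.get_many('foo', 'bar')    # [1, 2]
d = cache.get_dict('foo', 'bar', 'baz')    # unknown keys come back as None
assert d == {'foo': 1, 'bar': 2, 'baz': None}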
+ """ + return dict(zip(keys, self.get_many(*keys))) + + def set(self, key, value, timeout=None): + """Add a new key/value to the cache (overwrites value, if key already + exists in the cache). + + :param key: the key to set + :param value: the value for the key + :param timeout: the cache timeout for the key in seconds (if not + specified, it uses the default timeout). A timeout of + 0 idicates that the cache never expires. + :returns: ``True`` if key has been updated, ``False`` for backend + errors. Pickling errors, however, will raise a subclass of + ``pickle.PickleError``. + :rtype: boolean + """ + return True + + def add(self, key, value, timeout=None): + """Works like :meth:`set` but does not overwrite the values of already + existing keys. + + :param key: the key to set + :param value: the value for the key + :param timeout: the cache timeout for the key in seconds (if not + specified, it uses the default timeout). A timeout of + 0 idicates that the cache never expires. + :returns: Same as :meth:`set`, but also ``False`` for already + existing keys. + :rtype: boolean + """ + return True + + def set_many(self, mapping, timeout=None): + """Sets multiple keys and values from a mapping. + + :param mapping: a mapping with the keys/values to set. + :param timeout: the cache timeout for the key in seconds (if not + specified, it uses the default timeout). A timeout of + 0 idicates that the cache never expires. + :returns: Whether all given keys have been set. + :rtype: boolean + """ + rv = True + for key, value in _items(mapping): + if not self.set(key, value, timeout): + rv = False + return rv + + def delete_many(self, *keys): + """Deletes multiple keys at once. + + :param keys: The function accepts multiple keys as positional + arguments. + :returns: Whether all given keys have been deleted. + :rtype: boolean + """ + return all(self.delete(key) for key in keys) + + def has(self, key): + """Checks if a key exists in the cache without returning it. This is a + cheap operation that bypasses loading the actual data on the backend. + + This method is optional and may not be implemented on all caches. + + :param key: the key to check + """ + raise NotImplementedError( + '%s doesn\'t have an efficient implementation of `has`. That ' + 'means it is impossible to check whether a key exists without ' + 'fully loading the key\'s data. Consider using `self.get` ' + 'explicitly if you don\'t care about performance.' + ) + + def clear(self): + """Clears the cache. Keep in mind that not all caches support + completely clearing the cache. + + :returns: Whether the cache has been cleared. + :rtype: boolean + """ + return True + + def inc(self, key, delta=1): + """Increments the value of a key by `delta`. If the key does + not yet exist it is initialized with `delta`. + + For supporting caches this is an atomic operation. + + :param key: the key to increment. + :param delta: the delta to add. + :returns: The new value or ``None`` for backend errors. + """ + value = (self.get(key) or 0) + delta + return value if self.set(key, value) else None + + def dec(self, key, delta=1): + """Decrements the value of a key by `delta`. If the key does + not yet exist it is initialized with `-delta`. + + For supporting caches this is an atomic operation. + + :param key: the key to increment. + :param delta: the delta to subtract. + :returns: The new value or `None` for backend errors. 
+ """ + value = (self.get(key) or 0) - delta + return value if self.set(key, value) else None + + +class NullCache(BaseCache): + + """A cache that doesn't cache. This can be useful for unit testing. + + :param default_timeout: a dummy parameter that is ignored but exists + for API compatibility with other caches. + """ + + def has(self, key): + return False + + +class SimpleCache(BaseCache): + + """Simple memory cache for single process environments. This class exists + mainly for the development server and is not 100% thread safe. It tries + to use as many atomic operations as possible and no locks for simplicity + but it could happen under heavy load that keys are added multiple times. + + :param threshold: the maximum number of items the cache stores before + it starts deleting some. + :param default_timeout: the default timeout that is used if no timeout is + specified on :meth:`~BaseCache.set`. A timeout of + 0 indicates that the cache never expires. + """ + + def __init__(self, threshold=500, default_timeout=300): + BaseCache.__init__(self, default_timeout) + self._cache = {} + self.clear = self._cache.clear + self._threshold = threshold + + def _prune(self): + if len(self._cache) > self._threshold: + now = time() + toremove = [] + for idx, (key, (expires, _)) in enumerate(self._cache.items()): + if (expires != 0 and expires <= now) or idx % 3 == 0: + toremove.append(key) + for key in toremove: + self._cache.pop(key, None) + + def _normalize_timeout(self, timeout): + timeout = BaseCache._normalize_timeout(self, timeout) + if timeout > 0: + timeout = time() + timeout + return timeout + + def get(self, key): + try: + expires, value = self._cache[key] + if expires == 0 or expires > time(): + return pickle.loads(value) + except (KeyError, pickle.PickleError): + return None + + def set(self, key, value, timeout=None): + expires = self._normalize_timeout(timeout) + self._prune() + self._cache[key] = (expires, pickle.dumps(value, + pickle.HIGHEST_PROTOCOL)) + return True + + def add(self, key, value, timeout=None): + expires = self._normalize_timeout(timeout) + self._prune() + item = (expires, pickle.dumps(value, + pickle.HIGHEST_PROTOCOL)) + if key in self._cache: + return False + self._cache.setdefault(key, item) + return True + + def delete(self, key): + return self._cache.pop(key, None) is not None + + def has(self, key): + try: + expires, value = self._cache[key] + return expires == 0 or expires > time() + except KeyError: + return False + +_test_memcached_key = re.compile(r'[^\x00-\x21\xff]{1,250}$').match + + +class MemcachedCache(BaseCache): + + """A cache that uses memcached as backend. + + The first argument can either be an object that resembles the API of a + :class:`memcache.Client` or a tuple/list of server addresses. In the + event that a tuple/list is passed, Werkzeug tries to import the best + available memcache library. + + This cache looks into the following packages/modules to find bindings for + memcached: + + - ``pylibmc`` + - ``google.appengine.api.memcached`` + - ``memcached`` + - ``libmc`` + + Implementation notes: This cache backend works around some limitations in + memcached to simplify the interface. For example unicode keys are encoded + to utf-8 on the fly. Methods such as :meth:`~BaseCache.get_dict` return + the keys in the same format as passed. Furthermore all get methods + silently ignore key errors to not cause problems when untrusted user data + is passed to the get methods which is often the case in web applications. 
+ + :param servers: a list or tuple of server addresses or alternatively + a :class:`memcache.Client` or a compatible client. + :param default_timeout: the default timeout that is used if no timeout is + specified on :meth:`~BaseCache.set`. A timeout of + 0 indicates that the cache never expires. + :param key_prefix: a prefix that is added before all keys. This makes it + possible to use the same memcached server for different + applications. Keep in mind that + :meth:`~BaseCache.clear` will also clear keys with a + different prefix. + """ + + def __init__(self, servers=None, default_timeout=300, key_prefix=None): + BaseCache.__init__(self, default_timeout) + if servers is None or isinstance(servers, (list, tuple)): + if servers is None: + servers = ['127.0.0.1:11211'] + self._client = self.import_preferred_memcache_lib(servers) + if self._client is None: + raise RuntimeError('no memcache module found') + else: + # NOTE: servers is actually an already initialized memcache + # client. + self._client = servers + + self.key_prefix = to_native(key_prefix) + + def _normalize_key(self, key): + key = to_native(key, 'utf-8') + if self.key_prefix: + key = self.key_prefix + key + return key + + def _normalize_timeout(self, timeout): + timeout = BaseCache._normalize_timeout(self, timeout) + if timeout > 0: + timeout = int(time()) + timeout + return timeout + + def get(self, key): + key = self._normalize_key(key) + # memcached doesn't support keys longer than that. Because often + # checks for so long keys can occur because it's tested from user + # submitted data etc we fail silently for getting. + if _test_memcached_key(key): + return self._client.get(key) + + def get_dict(self, *keys): + key_mapping = {} + have_encoded_keys = False + for key in keys: + encoded_key = self._normalize_key(key) + if not isinstance(key, str): + have_encoded_keys = True + if _test_memcached_key(key): + key_mapping[encoded_key] = key + _keys = list(key_mapping) + d = rv = self._client.get_multi(_keys) + if have_encoded_keys or self.key_prefix: + rv = {} + for key, value in iteritems(d): + rv[key_mapping[key]] = value + if len(rv) < len(keys): + for key in keys: + if key not in rv: + rv[key] = None + return rv + + def add(self, key, value, timeout=None): + key = self._normalize_key(key) + timeout = self._normalize_timeout(timeout) + return self._client.add(key, value, timeout) + + def set(self, key, value, timeout=None): + key = self._normalize_key(key) + timeout = self._normalize_timeout(timeout) + return self._client.set(key, value, timeout) + + def get_many(self, *keys): + d = self.get_dict(*keys) + return [d[key] for key in keys] + + def set_many(self, mapping, timeout=None): + new_mapping = {} + for key, value in _items(mapping): + key = self._normalize_key(key) + new_mapping[key] = value + + timeout = self._normalize_timeout(timeout) + failed_keys = self._client.set_multi(new_mapping, timeout) + return not failed_keys + + def delete(self, key): + key = self._normalize_key(key) + if _test_memcached_key(key): + return self._client.delete(key) + + def delete_many(self, *keys): + new_keys = [] + for key in keys: + key = self._normalize_key(key) + if _test_memcached_key(key): + new_keys.append(key) + return self._client.delete_multi(new_keys) + + def has(self, key): + key = self._normalize_key(key) + if _test_memcached_key(key): + return self._client.append(key, '') + return False + + def clear(self): + return self._client.flush_all() + + def inc(self, key, delta=1): + key = self._normalize_key(key) + return 
self._client.incr(key, delta) + + def dec(self, key, delta=1): + key = self._normalize_key(key) + return self._client.decr(key, delta) + + def import_preferred_memcache_lib(self, servers): + """Returns an initialized memcache client. Used by the constructor.""" + try: + import pylibmc + except ImportError: + pass + else: + return pylibmc.Client(servers) + + try: + from google.appengine.api import memcache + except ImportError: + pass + else: + return memcache.Client() + + try: + import memcache + except ImportError: + pass + else: + return memcache.Client(servers) + + try: + import libmc + except ImportError: + pass + else: + return libmc.Client(servers) + + +# backwards compatibility +GAEMemcachedCache = MemcachedCache + + +class RedisCache(BaseCache): + + """Uses the Redis key-value store as a cache backend. + + The first argument can be either a string denoting address of the Redis + server or an object resembling an instance of a redis.Redis class. + + Note: Python Redis API already takes care of encoding unicode strings on + the fly. + + .. versionadded:: 0.7 + + .. versionadded:: 0.8 + `key_prefix` was added. + + .. versionchanged:: 0.8 + This cache backend now properly serializes objects. + + .. versionchanged:: 0.8.3 + This cache backend now supports password authentication. + + .. versionchanged:: 0.10 + ``**kwargs`` is now passed to the redis object. + + :param host: address of the Redis server or an object which API is + compatible with the official Python Redis client (redis-py). + :param port: port number on which Redis server listens for connections. + :param password: password authentication for the Redis server. + :param db: db (zero-based numeric index) on Redis Server to connect. + :param default_timeout: the default timeout that is used if no timeout is + specified on :meth:`~BaseCache.set`. A timeout of + 0 indicates that the cache never expires. + :param key_prefix: A prefix that should be added to all keys. + + Any additional keyword arguments will be passed to ``redis.Redis``. + """ + + def __init__(self, host='localhost', port=6379, password=None, + db=0, default_timeout=300, key_prefix=None, **kwargs): + BaseCache.__init__(self, default_timeout) + if host is None: + raise ValueError('RedisCache host parameter may not be None') + if isinstance(host, string_types): + try: + import redis + except ImportError: + raise RuntimeError('no redis module found') + if kwargs.get('decode_responses', None): + raise ValueError('decode_responses is not supported by ' + 'RedisCache.') + self._client = redis.Redis(host=host, port=port, password=password, + db=db, **kwargs) + else: + self._client = host + self.key_prefix = key_prefix or '' + + def _normalize_timeout(self, timeout): + timeout = BaseCache._normalize_timeout(self, timeout) + if timeout == 0: + timeout = -1 + return timeout + + def dump_object(self, value): + """Dumps an object into a string for redis. By default it serializes + integers as regular string and pickle dumps everything else. + """ + t = type(value) + if t in integer_types: + return str(value).encode('ascii') + return b'!' + pickle.dumps(value) + + def load_object(self, value): + """The reversal of :meth:`dump_object`. This might be called with + None. + """ + if value is None: + return None + if value.startswith(b'!'): + try: + return pickle.loads(value[1:]) + except pickle.PickleError: + return None + try: + return int(value) + except ValueError: + # before 0.8 we did not have serialization. Still support that. 
+ return value + + def get(self, key): + return self.load_object(self._client.get(self.key_prefix + key)) + + def get_many(self, *keys): + if self.key_prefix: + keys = [self.key_prefix + key for key in keys] + return [self.load_object(x) for x in self._client.mget(keys)] + + def set(self, key, value, timeout=None): + timeout = self._normalize_timeout(timeout) + dump = self.dump_object(value) + if timeout == -1: + result = self._client.set(name=self.key_prefix + key, + value=dump) + else: + result = self._client.setex(name=self.key_prefix + key, + value=dump, time=timeout) + return result + + def add(self, key, value, timeout=None): + timeout = self._normalize_timeout(timeout) + dump = self.dump_object(value) + return ( + self._client.setnx(name=self.key_prefix + key, value=dump) and + self._client.expire(name=self.key_prefix + key, time=timeout) + ) + + def set_many(self, mapping, timeout=None): + timeout = self._normalize_timeout(timeout) + # Use transaction=False to batch without calling redis MULTI + # which is not supported by twemproxy + pipe = self._client.pipeline(transaction=False) + + for key, value in _items(mapping): + dump = self.dump_object(value) + if timeout == -1: + pipe.set(name=self.key_prefix + key, value=dump) + else: + pipe.setex(name=self.key_prefix + key, value=dump, + time=timeout) + return pipe.execute() + + def delete(self, key): + return self._client.delete(self.key_prefix + key) + + def delete_many(self, *keys): + if not keys: + return + if self.key_prefix: + keys = [self.key_prefix + key for key in keys] + return self._client.delete(*keys) + + def has(self, key): + return self._client.exists(self.key_prefix + key) + + def clear(self): + status = False + if self.key_prefix: + keys = self._client.keys(self.key_prefix + '*') + if keys: + status = self._client.delete(*keys) + else: + status = self._client.flushdb() + return status + + def inc(self, key, delta=1): + return self._client.incr(name=self.key_prefix + key, amount=delta) + + def dec(self, key, delta=1): + return self._client.decr(name=self.key_prefix + key, amount=delta) + + +class FileSystemCache(BaseCache): + + """A cache that stores the items on the file system. This cache depends + on being the only user of the `cache_dir`. Make absolutely sure that + nobody but this cache stores files there or otherwise the cache will + randomly delete files therein. + + :param cache_dir: the directory where cache files are stored. + :param threshold: the maximum number of items the cache stores before + it starts deleting some. A threshold value of 0 + indicates no threshold. + :param default_timeout: the default timeout that is used if no timeout is + specified on :meth:`~BaseCache.set`. A timeout of + 0 indicates that the cache never expires. 
+ :param mode: the file mode wanted for the cache files, default 0600 + """ + + #: used for temporary files by the FileSystemCache + _fs_transaction_suffix = '.__wz_cache' + #: keep amount of files in a cache element + _fs_count_file = '__wz_cache_count' + + def __init__(self, cache_dir, threshold=500, default_timeout=300, + mode=0o600): + BaseCache.__init__(self, default_timeout) + self._path = cache_dir + self._threshold = threshold + self._mode = mode + + try: + os.makedirs(self._path) + except OSError as ex: + if ex.errno != errno.EEXIST: + raise + + self._update_count(value=len(self._list_dir())) + + @property + def _file_count(self): + return self.get(self._fs_count_file) or 0 + + def _update_count(self, delta=None, value=None): + # If we have no threshold, don't count files + if self._threshold == 0: + return + + if delta: + new_count = self._file_count + delta + else: + new_count = value or 0 + self.set(self._fs_count_file, new_count, mgmt_element=True) + + def _normalize_timeout(self, timeout): + timeout = BaseCache._normalize_timeout(self, timeout) + if timeout != 0: + timeout = time() + timeout + return int(timeout) + + def _list_dir(self): + """return a list of (fully qualified) cache filenames + """ + mgmt_files = [self._get_filename(name).split('/')[-1] + for name in (self._fs_count_file,)] + return [os.path.join(self._path, fn) for fn in os.listdir(self._path) + if not fn.endswith(self._fs_transaction_suffix) + and fn not in mgmt_files] + + def _prune(self): + if self._threshold == 0 or not self._file_count > self._threshold: + return + + entries = self._list_dir() + now = time() + for idx, fname in enumerate(entries): + try: + remove = False + with open(fname, 'rb') as f: + expires = pickle.load(f) + remove = (expires != 0 and expires <= now) or idx % 3 == 0 + + if remove: + os.remove(fname) + except (IOError, OSError): + pass + self._update_count(value=len(self._list_dir())) + + def clear(self): + for fname in self._list_dir(): + try: + os.remove(fname) + except (IOError, OSError): + self._update_count(value=len(self._list_dir())) + return False + self._update_count(value=0) + return True + + def _get_filename(self, key): + if isinstance(key, text_type): + key = key.encode('utf-8') # XXX unicode review + hash = md5(key).hexdigest() + return os.path.join(self._path, hash) + + def get(self, key): + filename = self._get_filename(key) + try: + with open(filename, 'rb') as f: + pickle_time = pickle.load(f) + if pickle_time == 0 or pickle_time >= time(): + return pickle.load(f) + else: + os.remove(filename) + return None + except (IOError, OSError, pickle.PickleError): + return None + + def add(self, key, value, timeout=None): + filename = self._get_filename(key) + if not os.path.exists(filename): + return self.set(key, value, timeout) + return False + + def set(self, key, value, timeout=None, mgmt_element=False): + # Management elements have no timeout + if mgmt_element: + timeout = 0 + + # Don't prune on management element update, to avoid loop + else: + self._prune() + + timeout = self._normalize_timeout(timeout) + filename = self._get_filename(key) + try: + fd, tmp = tempfile.mkstemp(suffix=self._fs_transaction_suffix, + dir=self._path) + with os.fdopen(fd, 'wb') as f: + pickle.dump(timeout, f, 1) + pickle.dump(value, f, pickle.HIGHEST_PROTOCOL) + rename(tmp, filename) + os.chmod(filename, self._mode) + except (IOError, OSError): + return False + else: + # Management elements should not count towards threshold + if not mgmt_element: + self._update_count(delta=1) + return 
True + + def delete(self, key, mgmt_element=False): + try: + os.remove(self._get_filename(key)) + except (IOError, OSError): + return False + else: + # Management elements should not count towards threshold + if not mgmt_element: + self._update_count(delta=-1) + return True + + def has(self, key): + filename = self._get_filename(key) + try: + with open(filename, 'rb') as f: + pickle_time = pickle.load(f) + if pickle_time == 0 or pickle_time >= time(): + return True + else: + os.remove(filename) + return False + except (IOError, OSError, pickle.PickleError): + return False + + +class UWSGICache(BaseCache): + """ Implements the cache using uWSGI's caching framework. + + .. note:: + This class cannot be used when running under PyPy, because the uWSGI + API implementation for PyPy is lacking the needed functionality. + + :param default_timeout: The default timeout in seconds. + :param cache: The name of the caching instance to connect to, for + example: mycache@localhost:3031, defaults to an empty string, which + means uWSGI will cache in the local instance. If the cache is in the + same instance as the werkzeug app, you only have to provide the name of + the cache. + """ + def __init__(self, default_timeout=300, cache=''): + BaseCache.__init__(self, default_timeout) + + if platform.python_implementation() == 'PyPy': + raise RuntimeError("uWSGI caching does not work under PyPy, see " + "the docs for more details.") + + try: + import uwsgi + self._uwsgi = uwsgi + except ImportError: + raise RuntimeError("uWSGI could not be imported, are you " + "running under uWSGI?") + + self.cache = cache + + def get(self, key): + rv = self._uwsgi.cache_get(key, self.cache) + if rv is None: + return + return pickle.loads(rv) + + def delete(self, key): + return self._uwsgi.cache_del(key, self.cache) + + def set(self, key, value, timeout=None): + return self._uwsgi.cache_update(key, pickle.dumps(value), + self._normalize_timeout(timeout), + self.cache) + + def add(self, key, value, timeout=None): + return self._uwsgi.cache_set(key, pickle.dumps(value), + self._normalize_timeout(timeout), + self.cache) + + def clear(self): + return self._uwsgi.cache_clear(self.cache) + + def has(self, key): + return self._uwsgi.cache_exists(key, self.cache) is not None diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/fixers.py b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/fixers.py new file mode 100644 index 0000000..b88a861 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/fixers.py @@ -0,0 +1,254 @@ +# -*- coding: utf-8 -*- +""" + werkzeug.contrib.fixers + ~~~~~~~~~~~~~~~~~~~~~~~ + + .. versionadded:: 0.5 + + This module includes various helpers that fix bugs in web servers. They may + be necessary for some versions of a buggy web server but not others. We try + to stay updated with the status of the bugs as good as possible but you have + to make sure whether they fix the problem you encounter. + + If you notice bugs in webservers not fixed in this module consider + contributing a patch. + + :copyright: Copyright 2009 by the Werkzeug Team, see AUTHORS for more details. + :license: BSD, see LICENSE for more details. 
+""" +try: + from urllib import unquote +except ImportError: + from urllib.parse import unquote + +from werkzeug.http import parse_options_header, parse_cache_control_header, \ + parse_set_header +from werkzeug.useragents import UserAgent +from werkzeug.datastructures import Headers, ResponseCacheControl + + +class CGIRootFix(object): + + """Wrap the application in this middleware if you are using FastCGI or CGI + and you have problems with your app root being set to the cgi script's path + instead of the path users are going to visit + + .. versionchanged:: 0.9 + Added `app_root` parameter and renamed from `LighttpdCGIRootFix`. + + :param app: the WSGI application + :param app_root: Defaulting to ``'/'``, you can set this to something else + if your app is mounted somewhere else. + """ + + def __init__(self, app, app_root='/'): + self.app = app + self.app_root = app_root + + def __call__(self, environ, start_response): + # only set PATH_INFO for older versions of Lighty or if no + # server software is provided. That's because the test was + # added in newer Werkzeug versions and we don't want to break + # people's code if they are using this fixer in a test that + # does not set the SERVER_SOFTWARE key. + if 'SERVER_SOFTWARE' not in environ or \ + environ['SERVER_SOFTWARE'] < 'lighttpd/1.4.28': + environ['PATH_INFO'] = environ.get('SCRIPT_NAME', '') + \ + environ.get('PATH_INFO', '') + environ['SCRIPT_NAME'] = self.app_root.strip('/') + return self.app(environ, start_response) + +# backwards compatibility +LighttpdCGIRootFix = CGIRootFix + + +class PathInfoFromRequestUriFix(object): + + """On windows environment variables are limited to the system charset + which makes it impossible to store the `PATH_INFO` variable in the + environment without loss of information on some systems. + + This is for example a problem for CGI scripts on a Windows Apache. + + This fixer works by recreating the `PATH_INFO` from `REQUEST_URI`, + `REQUEST_URL`, or `UNENCODED_URL` (whatever is available). Thus the + fix can only be applied if the webserver supports either of these + variables. + + :param app: the WSGI application + """ + + def __init__(self, app): + self.app = app + + def __call__(self, environ, start_response): + for key in 'REQUEST_URL', 'REQUEST_URI', 'UNENCODED_URL': + if key not in environ: + continue + request_uri = unquote(environ[key]) + script_name = unquote(environ.get('SCRIPT_NAME', '')) + if request_uri.startswith(script_name): + environ['PATH_INFO'] = request_uri[len(script_name):] \ + .split('?', 1)[0] + break + return self.app(environ, start_response) + + +class ProxyFix(object): + + """This middleware can be applied to add HTTP proxy support to an + application that was not designed with HTTP proxies in mind. It + sets `REMOTE_ADDR`, `HTTP_HOST` from `X-Forwarded` headers. While + Werkzeug-based applications already can use + :py:func:`werkzeug.wsgi.get_host` to retrieve the current host even if + behind proxy setups, this middleware can be used for applications which + access the WSGI environment directly. + + If you have more than one proxy server in front of your app, set + `num_proxies` accordingly. + + Do not use this middleware in non-proxy setups for security reasons. + + The original values of `REMOTE_ADDR` and `HTTP_HOST` are stored in + the WSGI environment as `werkzeug.proxy_fix.orig_remote_addr` and + `werkzeug.proxy_fix.orig_http_host`. + + :param app: the WSGI application + :param num_proxies: the number of proxy servers in front of the app. 
+ """ + + def __init__(self, app, num_proxies=1): + self.app = app + self.num_proxies = num_proxies + + def get_remote_addr(self, forwarded_for): + """Selects the new remote addr from the given list of ips in + X-Forwarded-For. By default it picks the one that the `num_proxies` + proxy server provides. Before 0.9 it would always pick the first. + + .. versionadded:: 0.8 + """ + if len(forwarded_for) >= self.num_proxies: + return forwarded_for[-self.num_proxies] + + def __call__(self, environ, start_response): + getter = environ.get + forwarded_proto = getter('HTTP_X_FORWARDED_PROTO', '') + forwarded_for = getter('HTTP_X_FORWARDED_FOR', '').split(',') + forwarded_host = getter('HTTP_X_FORWARDED_HOST', '') + environ.update({ + 'werkzeug.proxy_fix.orig_wsgi_url_scheme': getter('wsgi.url_scheme'), + 'werkzeug.proxy_fix.orig_remote_addr': getter('REMOTE_ADDR'), + 'werkzeug.proxy_fix.orig_http_host': getter('HTTP_HOST') + }) + forwarded_for = [x for x in [x.strip() for x in forwarded_for] if x] + remote_addr = self.get_remote_addr(forwarded_for) + if remote_addr is not None: + environ['REMOTE_ADDR'] = remote_addr + if forwarded_host: + environ['HTTP_HOST'] = forwarded_host + if forwarded_proto: + environ['wsgi.url_scheme'] = forwarded_proto + return self.app(environ, start_response) + + +class HeaderRewriterFix(object): + + """This middleware can remove response headers and add others. This + is for example useful to remove the `Date` header from responses if you + are using a server that adds that header, no matter if it's present or + not or to add `X-Powered-By` headers:: + + app = HeaderRewriterFix(app, remove_headers=['Date'], + add_headers=[('X-Powered-By', 'WSGI')]) + + :param app: the WSGI application + :param remove_headers: a sequence of header keys that should be + removed. + :param add_headers: a sequence of ``(key, value)`` tuples that should + be added. + """ + + def __init__(self, app, remove_headers=None, add_headers=None): + self.app = app + self.remove_headers = set(x.lower() for x in (remove_headers or ())) + self.add_headers = list(add_headers or ()) + + def __call__(self, environ, start_response): + def rewriting_start_response(status, headers, exc_info=None): + new_headers = [] + for key, value in headers: + if key.lower() not in self.remove_headers: + new_headers.append((key, value)) + new_headers += self.add_headers + return start_response(status, new_headers, exc_info) + return self.app(environ, rewriting_start_response) + + +class InternetExplorerFix(object): + + """This middleware fixes a couple of bugs with Microsoft Internet + Explorer. Currently the following fixes are applied: + + - removing of `Vary` headers for unsupported mimetypes which + causes troubles with caching. Can be disabled by passing + ``fix_vary=False`` to the constructor. + see: http://support.microsoft.com/kb/824847/en-us + + - removes offending headers to work around caching bugs in + Internet Explorer if `Content-Disposition` is set. Can be + disabled by passing ``fix_attach=False`` to the constructor. + + If it does not detect affected Internet Explorer versions it won't touch + the request / response. + """ + + # This code was inspired by Django fixers for the same bugs. 
The + # fix_vary and fix_attach fixers were originally implemented in Django + # by Michael Axiak and is available as part of the Django project: + # http://code.djangoproject.com/ticket/4148 + + def __init__(self, app, fix_vary=True, fix_attach=True): + self.app = app + self.fix_vary = fix_vary + self.fix_attach = fix_attach + + def fix_headers(self, environ, headers, status=None): + if self.fix_vary: + header = headers.get('content-type', '') + mimetype, options = parse_options_header(header) + if mimetype not in ('text/html', 'text/plain', 'text/sgml'): + headers.pop('vary', None) + + if self.fix_attach and 'content-disposition' in headers: + pragma = parse_set_header(headers.get('pragma', '')) + pragma.discard('no-cache') + header = pragma.to_header() + if not header: + headers.pop('pragma', '') + else: + headers['Pragma'] = header + header = headers.get('cache-control', '') + if header: + cc = parse_cache_control_header(header, + cls=ResponseCacheControl) + cc.no_cache = None + cc.no_store = False + header = cc.to_header() + if not header: + headers.pop('cache-control', '') + else: + headers['Cache-Control'] = header + + def run_fixed(self, environ, start_response): + def fixing_start_response(status, headers, exc_info=None): + headers = Headers(headers) + self.fix_headers(environ, headers, status) + return start_response(status, headers.to_wsgi_list(), exc_info) + return self.app(environ, fixing_start_response) + + def __call__(self, environ, start_response): + ua = UserAgent(environ) + if ua.browser != 'msie': + return self.app(environ, start_response) + return self.run_fixed(environ, start_response) diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/iterio.py b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/iterio.py new file mode 100644 index 0000000..c0ced37 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/iterio.py @@ -0,0 +1,352 @@ +# -*- coding: utf-8 -*- +r""" + werkzeug.contrib.iterio + ~~~~~~~~~~~~~~~~~~~~~~~ + + This module implements a :class:`IterIO` that converts an iterator into + a stream object and the other way round. Converting streams into + iterators requires the `greenlet`_ module. + + To convert an iterator into a stream all you have to do is to pass it + directly to the :class:`IterIO` constructor. In this example we pass it + a newly created generator:: + + def foo(): + yield "something\n" + yield "otherthings" + stream = IterIO(foo()) + print stream.read() # read the whole iterator + + The other way round works a bit different because we have to ensure that + the code execution doesn't take place yet. An :class:`IterIO` call with a + callable as first argument does two things. The function itself is passed + an :class:`IterIO` stream it can feed. The object returned by the + :class:`IterIO` constructor on the other hand is not an stream object but + an iterator:: + + def foo(stream): + stream.write("some") + stream.write("thing") + stream.flush() + stream.write("otherthing") + iterator = IterIO(foo) + print iterator.next() # prints something + print iterator.next() # prints otherthing + iterator.next() # raises StopIteration + + .. _greenlet: https://github.com/python-greenlet/greenlet + + :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. + :license: BSD, see LICENSE for more details. 
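+
+    Under Python 3 the iterator side of the same example is spelled with
+    the built-in :func:`next` and the ``print()`` function (sketch)::
+
+        iterator = IterIO(foo)
+        print(next(iterator))  # prints something
+        print(next(iterator))  # prints otherthing
+        next(iterator)         # raises StopIteration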
+""" +try: + import greenlet +except ImportError: + greenlet = None + +from werkzeug._compat import implements_iterator + + +def _mixed_join(iterable, sentinel): + """concatenate any string type in an intelligent way.""" + iterator = iter(iterable) + first_item = next(iterator, sentinel) + if isinstance(first_item, bytes): + return first_item + b''.join(iterator) + return first_item + u''.join(iterator) + + +def _newline(reference_string): + if isinstance(reference_string, bytes): + return b'\n' + return u'\n' + + +@implements_iterator +class IterIO(object): + + """Instances of this object implement an interface compatible with the + standard Python :class:`file` object. Streams are either read-only or + write-only depending on how the object is created. + + If the first argument is an iterable a file like object is returned that + returns the contents of the iterable. In case the iterable is empty + read operations will return the sentinel value. + + If the first argument is a callable then the stream object will be + created and passed to that function. The caller itself however will + not receive a stream but an iterable. The function will be be executed + step by step as something iterates over the returned iterable. Each + call to :meth:`flush` will create an item for the iterable. If + :meth:`flush` is called without any writes in-between the sentinel + value will be yielded. + + Note for Python 3: due to the incompatible interface of bytes and + streams you should set the sentinel value explicitly to an empty + bytestring (``b''``) if you are expecting to deal with bytes as + otherwise the end of the stream is marked with the wrong sentinel + value. + + .. versionadded:: 0.9 + `sentinel` parameter was added. + """ + + def __new__(cls, obj, sentinel=''): + try: + iterator = iter(obj) + except TypeError: + return IterI(obj, sentinel) + return IterO(iterator, sentinel) + + def __iter__(self): + return self + + def tell(self): + if self.closed: + raise ValueError('I/O operation on closed file') + return self.pos + + def isatty(self): + if self.closed: + raise ValueError('I/O operation on closed file') + return False + + def seek(self, pos, mode=0): + if self.closed: + raise ValueError('I/O operation on closed file') + raise IOError(9, 'Bad file descriptor') + + def truncate(self, size=None): + if self.closed: + raise ValueError('I/O operation on closed file') + raise IOError(9, 'Bad file descriptor') + + def write(self, s): + if self.closed: + raise ValueError('I/O operation on closed file') + raise IOError(9, 'Bad file descriptor') + + def writelines(self, list): + if self.closed: + raise ValueError('I/O operation on closed file') + raise IOError(9, 'Bad file descriptor') + + def read(self, n=-1): + if self.closed: + raise ValueError('I/O operation on closed file') + raise IOError(9, 'Bad file descriptor') + + def readlines(self, sizehint=0): + if self.closed: + raise ValueError('I/O operation on closed file') + raise IOError(9, 'Bad file descriptor') + + def readline(self, length=None): + if self.closed: + raise ValueError('I/O operation on closed file') + raise IOError(9, 'Bad file descriptor') + + def flush(self): + if self.closed: + raise ValueError('I/O operation on closed file') + raise IOError(9, 'Bad file descriptor') + + def __next__(self): + if self.closed: + raise StopIteration() + line = self.readline() + if not line: + raise StopIteration() + return line + + +class IterI(IterIO): + + """Convert an stream into an iterator.""" + + def __new__(cls, func, sentinel=''): + if 
greenlet is None: + raise RuntimeError('IterI requires greenlet support') + stream = object.__new__(cls) + stream._parent = greenlet.getcurrent() + stream._buffer = [] + stream.closed = False + stream.sentinel = sentinel + stream.pos = 0 + + def run(): + func(stream) + stream.close() + + g = greenlet.greenlet(run, stream._parent) + while 1: + rv = g.switch() + if not rv: + return + yield rv[0] + + def close(self): + if not self.closed: + self.closed = True + self._flush_impl() + + def write(self, s): + if self.closed: + raise ValueError('I/O operation on closed file') + if s: + self.pos += len(s) + self._buffer.append(s) + + def writelines(self, list): + for item in list: + self.write(item) + + def flush(self): + if self.closed: + raise ValueError('I/O operation on closed file') + self._flush_impl() + + def _flush_impl(self): + data = _mixed_join(self._buffer, self.sentinel) + self._buffer = [] + if not data and self.closed: + self._parent.switch() + else: + self._parent.switch((data,)) + + +class IterO(IterIO): + + """Iter output. Wrap an iterator and give it a stream like interface.""" + + def __new__(cls, gen, sentinel=''): + self = object.__new__(cls) + self._gen = gen + self._buf = None + self.sentinel = sentinel + self.closed = False + self.pos = 0 + return self + + def __iter__(self): + return self + + def _buf_append(self, string): + '''Replace string directly without appending to an empty string, + avoiding type issues.''' + if not self._buf: + self._buf = string + else: + self._buf += string + + def close(self): + if not self.closed: + self.closed = True + if hasattr(self._gen, 'close'): + self._gen.close() + + def seek(self, pos, mode=0): + if self.closed: + raise ValueError('I/O operation on closed file') + if mode == 1: + pos += self.pos + elif mode == 2: + self.read() + self.pos = min(self.pos, self.pos + pos) + return + elif mode != 0: + raise IOError('Invalid argument') + buf = [] + try: + tmp_end_pos = len(self._buf) + while pos > tmp_end_pos: + item = next(self._gen) + tmp_end_pos += len(item) + buf.append(item) + except StopIteration: + pass + if buf: + self._buf_append(_mixed_join(buf, self.sentinel)) + self.pos = max(0, pos) + + def read(self, n=-1): + if self.closed: + raise ValueError('I/O operation on closed file') + if n < 0: + self._buf_append(_mixed_join(self._gen, self.sentinel)) + result = self._buf[self.pos:] + self.pos += len(result) + return result + new_pos = self.pos + n + buf = [] + try: + tmp_end_pos = 0 if self._buf is None else len(self._buf) + while new_pos > tmp_end_pos or (self._buf is None and not buf): + item = next(self._gen) + tmp_end_pos += len(item) + buf.append(item) + except StopIteration: + pass + if buf: + self._buf_append(_mixed_join(buf, self.sentinel)) + + if self._buf is None: + return self.sentinel + + new_pos = max(0, new_pos) + try: + return self._buf[self.pos:new_pos] + finally: + self.pos = min(new_pos, len(self._buf)) + + def readline(self, length=None): + if self.closed: + raise ValueError('I/O operation on closed file') + + nl_pos = -1 + if self._buf: + nl_pos = self._buf.find(_newline(self._buf), self.pos) + buf = [] + try: + if self._buf is None: + pos = self.pos + else: + pos = len(self._buf) + while nl_pos < 0: + item = next(self._gen) + local_pos = item.find(_newline(item)) + buf.append(item) + if local_pos >= 0: + nl_pos = pos + local_pos + break + pos += len(item) + except StopIteration: + pass + if buf: + self._buf_append(_mixed_join(buf, self.sentinel)) + + if self._buf is None: + return self.sentinel + + if nl_pos < 
0: + new_pos = len(self._buf) + else: + new_pos = nl_pos + 1 + if length is not None and self.pos + length < new_pos: + new_pos = self.pos + length + try: + return self._buf[self.pos:new_pos] + finally: + self.pos = min(new_pos, len(self._buf)) + + def readlines(self, sizehint=0): + total = 0 + lines = [] + line = self.readline() + while line: + lines.append(line) + total += len(line) + if 0 < sizehint <= total: + break + line = self.readline() + return lines diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/jsrouting.py b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/jsrouting.py new file mode 100644 index 0000000..d815892 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/jsrouting.py @@ -0,0 +1,264 @@ +# -*- coding: utf-8 -*- +""" + werkzeug.contrib.jsrouting + ~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Addon module that allows to create a JavaScript function from a map + that generates rules. + + :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. + :license: BSD, see LICENSE for more details. +""" +try: + from simplejson import dumps +except ImportError: + try: + from json import dumps + except ImportError: + def dumps(*args): + raise RuntimeError('simplejson required for jsrouting') + +from inspect import getmro +from werkzeug.routing import NumberConverter +from werkzeug._compat import iteritems + + +def render_template(name_parts, rules, converters): + result = u'' + if name_parts: + for idx in range(0, len(name_parts) - 1): + name = u'.'.join(name_parts[:idx + 1]) + result += u"if (typeof %s === 'undefined') %s = {}\n" % (name, name) + result += '%s = ' % '.'.join(name_parts) + result += """(function (server_name, script_name, subdomain, url_scheme) { + var converters = [%(converters)s]; + var rules = %(rules)s; + function in_array(array, value) { + if (array.indexOf != undefined) { + return array.indexOf(value) != -1; + } + for (var i = 0; i < array.length; i++) { + if (array[i] == value) { + return true; + } + } + return false; + } + function array_diff(array1, array2) { + array1 = array1.slice(); + for (var i = array1.length-1; i >= 0; i--) { + if (in_array(array2, array1[i])) { + array1.splice(i, 1); + } + } + return array1; + } + function split_obj(obj) { + var names = []; + var values = []; + for (var name in obj) { + if (typeof(obj[name]) != 'function') { + names.push(name); + values.push(obj[name]); + } + } + return {names: names, values: values, original: obj}; + } + function suitable(rule, args) { + var default_args = split_obj(rule.defaults || {}); + var diff_arg_names = array_diff(rule.arguments, default_args.names); + + for (var i = 0; i < diff_arg_names.length; i++) { + if (!in_array(args.names, diff_arg_names[i])) { + return false; + } + } + + if (array_diff(rule.arguments, args.names).length == 0) { + if (rule.defaults == null) { + return true; + } + for (var i = 0; i < default_args.names.length; i++) { + var key = default_args.names[i]; + var value = default_args.values[i]; + if (value != args.original[key]) { + return false; + } + } + } + + return true; + } + function build(rule, args) { + var tmp = []; + var processed = rule.arguments.slice(); + for (var i = 0; i < rule.trace.length; i++) { + var part = rule.trace[i]; + if (part.is_dynamic) { + var converter = converters[rule.converters[part.data]]; + var data = converter(args.original[part.data]); + if (data == null) { + return null; + } + tmp.push(data); + processed.push(part.name); + } else { + tmp.push(part.data); + } + } + tmp = tmp.join(''); + 
var pipe = tmp.indexOf('|'); + var subdomain = tmp.substring(0, pipe); + var url = tmp.substring(pipe+1); + + var unprocessed = array_diff(args.names, processed); + var first_query_var = true; + for (var i = 0; i < unprocessed.length; i++) { + if (first_query_var) { + url += '?'; + } else { + url += '&'; + } + first_query_var = false; + url += encodeURIComponent(unprocessed[i]); + url += '='; + url += encodeURIComponent(args.original[unprocessed[i]]); + } + return {subdomain: subdomain, path: url}; + } + function lstrip(s, c) { + while (s && s.substring(0, 1) == c) { + s = s.substring(1); + } + return s; + } + function rstrip(s, c) { + while (s && s.substring(s.length-1, s.length) == c) { + s = s.substring(0, s.length-1); + } + return s; + } + return function(endpoint, args, force_external) { + args = split_obj(args); + var rv = null; + for (var i = 0; i < rules.length; i++) { + var rule = rules[i]; + if (rule.endpoint != endpoint) continue; + if (suitable(rule, args)) { + rv = build(rule, args); + if (rv != null) { + break; + } + } + } + if (rv == null) { + return null; + } + if (!force_external && rv.subdomain == subdomain) { + return rstrip(script_name, '/') + '/' + lstrip(rv.path, '/'); + } else { + return url_scheme + '://' + + (rv.subdomain ? rv.subdomain + '.' : '') + + server_name + rstrip(script_name, '/') + + '/' + lstrip(rv.path, '/'); + } + }; +})""" % {'converters': u', '.join(converters), + 'rules': rules} + + return result + + +def generate_map(map, name='url_map'): + """ + Generates a JavaScript function containing the rules defined in + this map, to be used with a MapAdapter's generate_javascript + method. If you don't pass a name the returned JavaScript code is + an expression that returns a function. Otherwise it's a standalone + script that assigns the function with that name. Dotted names are + resolved (so you an use a name like 'obj.url_for') + + In order to use JavaScript generation, simplejson must be installed. + + Note that using this feature will expose the rules + defined in your map to users. If your rules contain sensitive + information, don't use JavaScript generation! 
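+
+    A rough usage sketch (illustrative; ``url_map`` is assumed to be a
+    :class:`werkzeug.routing.Map` instance)::
+
+        from werkzeug.contrib.jsrouting import generate_map
+        js_source = generate_map(url_map, name='url_map')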
+ """ + from warnings import warn + warn(DeprecationWarning('This module is deprecated')) + map.update() + rules = [] + converters = [] + for rule in map.iter_rules(): + trace = [{ + 'is_dynamic': is_dynamic, + 'data': data + } for is_dynamic, data in rule._trace] + rule_converters = {} + for key, converter in iteritems(rule._converters): + js_func = js_to_url_function(converter) + try: + index = converters.index(js_func) + except ValueError: + converters.append(js_func) + index = len(converters) - 1 + rule_converters[key] = index + rules.append({ + u'endpoint': rule.endpoint, + u'arguments': list(rule.arguments), + u'converters': rule_converters, + u'trace': trace, + u'defaults': rule.defaults + }) + + return render_template(name_parts=name and name.split('.') or [], + rules=dumps(rules), + converters=converters) + + +def generate_adapter(adapter, name='url_for', map_name='url_map'): + """Generates the url building function for a map.""" + values = { + u'server_name': dumps(adapter.server_name), + u'script_name': dumps(adapter.script_name), + u'subdomain': dumps(adapter.subdomain), + u'url_scheme': dumps(adapter.url_scheme), + u'name': name, + u'map_name': map_name + } + return u'''\ +var %(name)s = %(map_name)s( + %(server_name)s, + %(script_name)s, + %(subdomain)s, + %(url_scheme)s +);''' % values + + +def js_to_url_function(converter): + """Get the JavaScript converter function from a rule.""" + if hasattr(converter, 'js_to_url_function'): + data = converter.js_to_url_function() + else: + for cls in getmro(type(converter)): + if cls in js_to_url_functions: + data = js_to_url_functions[cls](converter) + break + else: + return 'encodeURIComponent' + return '(function(value) { %s })' % data + + +def NumberConverter_js_to_url(conv): + if conv.fixed_digits: + return u'''\ +var result = value.toString(); +while (result.length < %s) + result = '0' + result; +return result;''' % conv.fixed_digits + return u'return value.toString();' + + +js_to_url_functions = { + NumberConverter: NumberConverter_js_to_url +} diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/limiter.py b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/limiter.py new file mode 100644 index 0000000..6bc9d39 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/limiter.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +""" + werkzeug.contrib.limiter + ~~~~~~~~~~~~~~~~~~~~~~~~ + + A middleware that limits incoming data. This works around problems with + Trac_ or Django_ because those directly stream into the memory. + + .. _Trac: http://trac.edgewall.org/ + .. _Django: http://www.djangoproject.com/ + + :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. + :license: BSD, see LICENSE for more details. +""" +from warnings import warn + +from werkzeug.wsgi import LimitedStream + + +class StreamLimitMiddleware(object): + + """Limits the input stream to a given number of bytes. This is useful if + you have a WSGI application that reads form data into memory (django for + example) and you don't want users to harm the server by uploading tons of + data. + + Default is 10MB + + .. versionchanged:: 0.9 + Deprecated middleware. 
+ """ + + def __init__(self, app, maximum_size=1024 * 1024 * 10): + warn(DeprecationWarning('This middleware is deprecated')) + self.app = app + self.maximum_size = maximum_size + + def __call__(self, environ, start_response): + limit = min(self.maximum_size, int(environ.get('CONTENT_LENGTH') or 0)) + environ['wsgi.input'] = LimitedStream(environ['wsgi.input'], limit) + return self.app(environ, start_response) diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/lint.py b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/lint.py new file mode 100644 index 0000000..969c687 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/lint.py @@ -0,0 +1,343 @@ +# -*- coding: utf-8 -*- +""" + werkzeug.contrib.lint + ~~~~~~~~~~~~~~~~~~~~~ + + .. versionadded:: 0.5 + + This module provides a middleware that performs sanity checks of the WSGI + application. It checks that :pep:`333` is properly implemented and warns + on some common HTTP errors such as non-empty responses for 304 status + codes. + + This module provides a middleware, the :class:`LintMiddleware`. Wrap your + application with it and it will warn about common problems with WSGI and + HTTP while your application is running. + + It's strongly recommended to use it during development. + + :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. + :license: BSD, see LICENSE for more details. +""" +try: + from urllib.parse import urlparse +except ImportError: + from urlparse import urlparse + +from warnings import warn + +from werkzeug.datastructures import Headers +from werkzeug.http import is_entity_header +from werkzeug.wsgi import FileWrapper +from werkzeug._compat import string_types + + +class WSGIWarning(Warning): + + """Warning class for WSGI warnings.""" + + +class HTTPWarning(Warning): + + """Warning class for HTTP warnings.""" + + +def check_string(context, obj, stacklevel=3): + if type(obj) is not str: + warn(WSGIWarning('%s requires bytestrings, got %s' % + (context, obj.__class__.__name__))) + + +class InputStream(object): + + def __init__(self, stream): + self._stream = stream + + def read(self, *args): + if len(args) == 0: + warn(WSGIWarning('wsgi does not guarantee an EOF marker on the ' + 'input stream, thus making calls to ' + 'wsgi.input.read() unsafe. Conforming servers ' + 'may never return from this call.'), + stacklevel=2) + elif len(args) != 1: + warn(WSGIWarning('too many parameters passed to wsgi.input.read()'), + stacklevel=2) + return self._stream.read(*args) + + def readline(self, *args): + if len(args) == 0: + warn(WSGIWarning('Calls to wsgi.input.readline() without arguments' + ' are unsafe. Use wsgi.input.read() instead.'), + stacklevel=2) + elif len(args) == 1: + warn(WSGIWarning('wsgi.input.readline() was called with a size hint. 
' + 'WSGI does not support this, although it\'s available ' + 'on all major servers.'), + stacklevel=2) + else: + raise TypeError('too many arguments passed to wsgi.input.readline()') + return self._stream.readline(*args) + + def __iter__(self): + try: + return iter(self._stream) + except TypeError: + warn(WSGIWarning('wsgi.input is not iterable.'), stacklevel=2) + return iter(()) + + def close(self): + warn(WSGIWarning('application closed the input stream!'), + stacklevel=2) + self._stream.close() + + +class ErrorStream(object): + + def __init__(self, stream): + self._stream = stream + + def write(self, s): + check_string('wsgi.error.write()', s) + self._stream.write(s) + + def flush(self): + self._stream.flush() + + def writelines(self, seq): + for line in seq: + self.write(seq) + + def close(self): + warn(WSGIWarning('application closed the error stream!'), + stacklevel=2) + self._stream.close() + + +class GuardedWrite(object): + + def __init__(self, write, chunks): + self._write = write + self._chunks = chunks + + def __call__(self, s): + check_string('write()', s) + self._write.write(s) + self._chunks.append(len(s)) + + +class GuardedIterator(object): + + def __init__(self, iterator, headers_set, chunks): + self._iterator = iterator + self._next = iter(iterator).next + self.closed = False + self.headers_set = headers_set + self.chunks = chunks + + def __iter__(self): + return self + + def next(self): + if self.closed: + warn(WSGIWarning('iterated over closed app_iter'), + stacklevel=2) + rv = self._next() + if not self.headers_set: + warn(WSGIWarning('Application returned before it ' + 'started the response'), stacklevel=2) + check_string('application iterator items', rv) + self.chunks.append(len(rv)) + return rv + + def close(self): + self.closed = True + if hasattr(self._iterator, 'close'): + self._iterator.close() + + if self.headers_set: + status_code, headers = self.headers_set + bytes_sent = sum(self.chunks) + content_length = headers.get('content-length', type=int) + + if status_code == 304: + for key, value in headers: + key = key.lower() + if key not in ('expires', 'content-location') and \ + is_entity_header(key): + warn(HTTPWarning('entity header %r found in 304 ' + 'response' % key)) + if bytes_sent: + warn(HTTPWarning('304 responses must not have a body')) + elif 100 <= status_code < 200 or status_code == 204: + if content_length != 0: + warn(HTTPWarning('%r responses must have an empty ' + 'content length' % status_code)) + if bytes_sent: + warn(HTTPWarning('%r responses must not have a body' % + status_code)) + elif content_length is not None and content_length != bytes_sent: + warn(WSGIWarning('Content-Length and the number of bytes ' + 'sent to the client do not match.')) + + def __del__(self): + if not self.closed: + try: + warn(WSGIWarning('Iterator was garbage collected before ' + 'it was closed.')) + except Exception: + pass + + +class LintMiddleware(object): + + """This middleware wraps an application and warns on common errors. + Among other thing it currently checks for the following problems: + + - invalid status codes + - non-bytestrings sent to the WSGI server + - strings returned from the WSGI application + - non-empty conditional responses + - unquoted etags + - relative URLs in the Location header + - unsafe calls to wsgi.input + - unclosed iterators + + Detected errors are emitted using the standard Python :mod:`warnings` + system and usually end up on :data:`stderr`. 
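+
+    Because these are ordinary warnings, they can be escalated to hard
+    errors during a test run with the standard :mod:`warnings` machinery
+    (illustrative sketch)::
+
+        import warnings
+        from werkzeug.contrib.lint import WSGIWarning
+
+        warnings.simplefilter('error', WSGIWarning)  # fail fast on WSGI issues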
+ + :: + + from werkzeug.contrib.lint import LintMiddleware + app = LintMiddleware(app) + + :param app: the application to wrap + """ + + def __init__(self, app): + self.app = app + + def check_environ(self, environ): + if type(environ) is not dict: + warn(WSGIWarning('WSGI environment is not a standard python dict.'), + stacklevel=4) + for key in ('REQUEST_METHOD', 'SERVER_NAME', 'SERVER_PORT', + 'wsgi.version', 'wsgi.input', 'wsgi.errors', + 'wsgi.multithread', 'wsgi.multiprocess', + 'wsgi.run_once'): + if key not in environ: + warn(WSGIWarning('required environment key %r not found' + % key), stacklevel=3) + if environ['wsgi.version'] != (1, 0): + warn(WSGIWarning('environ is not a WSGI 1.0 environ'), + stacklevel=3) + + script_name = environ.get('SCRIPT_NAME', '') + if script_name and script_name[:1] != '/': + warn(WSGIWarning('SCRIPT_NAME does not start with a slash: %r' + % script_name), stacklevel=3) + path_info = environ.get('PATH_INFO', '') + if path_info[:1] != '/': + warn(WSGIWarning('PATH_INFO does not start with a slash: %r' + % path_info), stacklevel=3) + + def check_start_response(self, status, headers, exc_info): + check_string('status', status) + status_code = status.split(None, 1)[0] + if len(status_code) != 3 or not status_code.isdigit(): + warn(WSGIWarning('Status code must be three digits'), stacklevel=3) + if len(status) < 4 or status[3] != ' ': + warn(WSGIWarning('Invalid value for status %r. Valid ' + 'status strings are three digits, a space ' + 'and a status explanation'), stacklevel=3) + status_code = int(status_code) + if status_code < 100: + warn(WSGIWarning('status code < 100 detected'), stacklevel=3) + + if type(headers) is not list: + warn(WSGIWarning('header list is not a list'), stacklevel=3) + for item in headers: + if type(item) is not tuple or len(item) != 2: + warn(WSGIWarning('Headers must tuple 2-item tuples'), + stacklevel=3) + name, value = item + if type(name) is not str or type(value) is not str: + warn(WSGIWarning('header items must be strings'), + stacklevel=3) + if name.lower() == 'status': + warn(WSGIWarning('The status header is not supported due to ' + 'conflicts with the CGI spec.'), + stacklevel=3) + + if exc_info is not None and not isinstance(exc_info, tuple): + warn(WSGIWarning('invalid value for exc_info'), stacklevel=3) + + headers = Headers(headers) + self.check_headers(headers) + + return status_code, headers + + def check_headers(self, headers): + etag = headers.get('etag') + if etag is not None: + if etag.startswith(('W/', 'w/')): + if etag.startswith('w/'): + warn(HTTPWarning('weak etag indicator should be upcase.'), + stacklevel=4) + etag = etag[2:] + if not (etag[:1] == etag[-1:] == '"'): + warn(HTTPWarning('unquoted etag emitted.'), stacklevel=4) + + location = headers.get('location') + if location is not None: + if not urlparse(location).netloc: + warn(HTTPWarning('absolute URLs required for location header'), + stacklevel=4) + + def check_iterator(self, app_iter): + if isinstance(app_iter, string_types): + warn(WSGIWarning('application returned string. Response will ' + 'send character for character to the client ' + 'which will kill the performance. 
Return a ' + 'list or iterable instead.'), stacklevel=3) + + def __call__(self, *args, **kwargs): + if len(args) != 2: + warn(WSGIWarning('Two arguments to WSGI app required'), stacklevel=2) + if kwargs: + warn(WSGIWarning('No keyword arguments to WSGI app allowed'), + stacklevel=2) + environ, start_response = args + + self.check_environ(environ) + environ['wsgi.input'] = InputStream(environ['wsgi.input']) + environ['wsgi.errors'] = ErrorStream(environ['wsgi.errors']) + + # hook our own file wrapper in so that applications will always + # iterate to the end and we can check the content length + environ['wsgi.file_wrapper'] = FileWrapper + + headers_set = [] + chunks = [] + + def checking_start_response(*args, **kwargs): + if len(args) not in (2, 3): + warn(WSGIWarning('Invalid number of arguments: %s, expected ' + '2 or 3' % len(args), stacklevel=2)) + if kwargs: + warn(WSGIWarning('no keyword arguments allowed.')) + + status, headers = args[:2] + if len(args) == 3: + exc_info = args[2] + else: + exc_info = None + + headers_set[:] = self.check_start_response(status, headers, + exc_info) + return GuardedWrite(start_response(status, headers, exc_info), + chunks) + + app_iter = self.app(environ, checking_start_response) + self.check_iterator(app_iter) + return GuardedIterator(app_iter, headers_set, chunks) diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/profiler.py b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/profiler.py new file mode 100644 index 0000000..be860af --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/profiler.py @@ -0,0 +1,147 @@ +# -*- coding: utf-8 -*- +""" + werkzeug.contrib.profiler + ~~~~~~~~~~~~~~~~~~~~~~~~~ + + This module provides a simple WSGI profiler middleware for finding + bottlenecks in web application. It uses the :mod:`profile` or + :mod:`cProfile` module to do the profiling and writes the stats to the + stream provided (defaults to stderr). + + Example usage:: + + from werkzeug.contrib.profiler import ProfilerMiddleware + app = ProfilerMiddleware(app) + + :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. + :license: BSD, see LICENSE for more details. +""" +import sys +import time +import os.path +try: + try: + from cProfile import Profile + except ImportError: + from profile import Profile + from pstats import Stats + available = True +except ImportError: + available = False + + +class MergeStream(object): + + """An object that redirects `write` calls to multiple streams. + Use this to log to both `sys.stdout` and a file:: + + f = open('profiler.log', 'w') + stream = MergeStream(sys.stdout, f) + profiler = ProfilerMiddleware(app, stream) + """ + + def __init__(self, *streams): + if not streams: + raise TypeError('at least one stream must be given') + self.streams = streams + + def write(self, data): + for stream in self.streams: + stream.write(data) + + +class ProfilerMiddleware(object): + + """Simple profiler middleware. Wraps a WSGI application and profiles + a request. This intentionally buffers the response so that timings are + more exact. + + By giving the `profile_dir` argument, pstat.Stats files are saved to that + directory, one file per request. Without it, a summary is printed to + `stream` instead. + + For the exact meaning of `sort_by` and `restrictions` consult the + :mod:`profile` documentation. + + .. versionadded:: 0.9 + Added support for `restrictions` and `profile_dir`. + + :param app: the WSGI application to profile. 
+ :param stream: the stream for the profiled stats. defaults to stderr. + :param sort_by: a tuple of columns to sort the result by. + :param restrictions: a tuple of profiling strictions, not used if dumping + to `profile_dir`. + :param profile_dir: directory name to save pstat files + """ + + def __init__(self, app, stream=None, + sort_by=('time', 'calls'), restrictions=(), profile_dir=None): + if not available: + raise RuntimeError('the profiler is not available because ' + 'profile or pstat is not installed.') + self._app = app + self._stream = stream or sys.stdout + self._sort_by = sort_by + self._restrictions = restrictions + self._profile_dir = profile_dir + + def __call__(self, environ, start_response): + response_body = [] + + def catching_start_response(status, headers, exc_info=None): + start_response(status, headers, exc_info) + return response_body.append + + def runapp(): + appiter = self._app(environ, catching_start_response) + response_body.extend(appiter) + if hasattr(appiter, 'close'): + appiter.close() + + p = Profile() + start = time.time() + p.runcall(runapp) + body = b''.join(response_body) + elapsed = time.time() - start + + if self._profile_dir is not None: + prof_filename = os.path.join(self._profile_dir, + '%s.%s.%06dms.%d.prof' % ( + environ['REQUEST_METHOD'], + environ.get('PATH_INFO').strip( + '/').replace('/', '.') or 'root', + elapsed * 1000.0, + time.time() + )) + p.dump_stats(prof_filename) + + else: + stats = Stats(p, stream=self._stream) + stats.sort_stats(*self._sort_by) + + self._stream.write('-' * 80) + self._stream.write('\nPATH: %r\n' % environ.get('PATH_INFO')) + stats.print_stats(*self._restrictions) + self._stream.write('-' * 80 + '\n\n') + + return [body] + + +def make_action(app_factory, hostname='localhost', port=5000, + threaded=False, processes=1, stream=None, + sort_by=('time', 'calls'), restrictions=()): + """Return a new callback for :mod:`werkzeug.script` that starts a local + server with the profiler enabled. + + :: + + from werkzeug.contrib import profiler + action_profile = profiler.make_action(make_app) + """ + def action(hostname=('h', hostname), port=('p', port), + threaded=threaded, processes=processes): + """Start a new development server.""" + from werkzeug.serving import run_simple + app = ProfilerMiddleware(app_factory(), stream, sort_by, restrictions) + run_simple(hostname, port, app, False, None, threaded, processes) + return action diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/securecookie.py b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/securecookie.py new file mode 100644 index 0000000..5927e2b --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/securecookie.py @@ -0,0 +1,323 @@ +# -*- coding: utf-8 -*- +r""" + werkzeug.contrib.securecookie + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + This module implements a cookie that is not alterable from the client + because it adds a checksum the server checks for. You can use it as + session replacement if all you have is a user id or something to mark + a logged in user. + + Keep in mind that the data is still readable from the client as a + normal cookie is. However you don't have to store and flush the + sessions you have at the server. 
+ + Example usage: + + >>> from werkzeug.contrib.securecookie import SecureCookie + >>> x = SecureCookie({"foo": 42, "baz": (1, 2, 3)}, "deadbeef") + + Dumping into a string so that one can store it in a cookie: + + >>> value = x.serialize() + + Loading from that string again: + + >>> x = SecureCookie.unserialize(value, "deadbeef") + >>> x["baz"] + (1, 2, 3) + + If someone modifies the cookie and the checksum is wrong the unserialize + method will fail silently and return a new empty `SecureCookie` object. + + Keep in mind that the values will be visible in the cookie so do not + store data in a cookie you don't want the user to see. + + Application Integration + ======================= + + If you are using the werkzeug request objects you could integrate the + secure cookie into your application like this:: + + from werkzeug.utils import cached_property + from werkzeug.wrappers import BaseRequest + from werkzeug.contrib.securecookie import SecureCookie + + # don't use this key but a different one; you could just use + # os.urandom(20) to get something random + SECRET_KEY = '\xfa\xdd\xb8z\xae\xe0}4\x8b\xea' + + class Request(BaseRequest): + + @cached_property + def client_session(self): + data = self.cookies.get('session_data') + if not data: + return SecureCookie(secret_key=SECRET_KEY) + return SecureCookie.unserialize(data, SECRET_KEY) + + def application(environ, start_response): + request = Request(environ) + + # get a response object here + response = ... + + if request.client_session.should_save: + session_data = request.client_session.serialize() + response.set_cookie('session_data', session_data, + httponly=True) + return response(environ, start_response) + + A less verbose integration can be achieved by using shorthand methods:: + + class Request(BaseRequest): + + @cached_property + def client_session(self): + return SecureCookie.load_cookie(self, secret_key=COOKIE_SECRET) + + def application(environ, start_response): + request = Request(environ) + + # get a response object here + response = ... + + request.client_session.save_cookie(response) + return response(environ, start_response) + + :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. + :license: BSD, see LICENSE for more details. +""" +import pickle +import base64 +from hmac import new as hmac +from time import time +from hashlib import sha1 as _default_hash + +from werkzeug._compat import iteritems, text_type, to_bytes +from werkzeug.urls import url_quote_plus, url_unquote_plus +from werkzeug._internal import _date_to_unix +from werkzeug.contrib.sessions import ModificationTrackingDict +from werkzeug.security import safe_str_cmp +from werkzeug._compat import to_native + + +class UnquoteError(Exception): + + """Internal exception used to signal failures on quoting.""" + + +class SecureCookie(ModificationTrackingDict): + + """Represents a secure cookie. You can subclass this class and provide + an alternative mac method. The import thing is that the mac method + is a function with a similar interface to the hashlib. Required + methods are update() and digest(). + + Example usage: + + >>> x = SecureCookie({"foo": 42, "baz": (1, 2, 3)}, "deadbeef") + >>> x["foo"] + 42 + >>> x["baz"] + (1, 2, 3) + >>> x["blafasel"] = 23 + >>> x.should_save + True + + :param data: the initial data. Either a dict, list of tuples or `None`. + :param secret_key: the secret key. If not set `None` or not specified + it has to be set before :meth:`serialize` is called. + :param new: The initial value of the `new` flag. 
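+
+    A minimal subclassing sketch (illustrative; swaps the default SHA-1
+    MAC hash for SHA-256)::
+
+        from hashlib import sha256
+
+        class Sha256Cookie(SecureCookie):
+            hash_method = staticmethod(sha256)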
+ """ + + #: The hash method to use. This has to be a module with a new function + #: or a function that creates a hashlib object. Such as `hashlib.md5` + #: Subclasses can override this attribute. The default hash is sha1. + #: Make sure to wrap this in staticmethod() if you store an arbitrary + #: function there such as hashlib.sha1 which might be implemented + #: as a function. + hash_method = staticmethod(_default_hash) + + #: the module used for serialization. Unless overriden by subclasses + #: the standard pickle module is used. + serialization_method = pickle + + #: if the contents should be base64 quoted. This can be disabled if the + #: serialization process returns cookie safe strings only. + quote_base64 = True + + def __init__(self, data=None, secret_key=None, new=True): + ModificationTrackingDict.__init__(self, data or ()) + # explicitly convert it into a bytestring because python 2.6 + # no longer performs an implicit string conversion on hmac + if secret_key is not None: + secret_key = to_bytes(secret_key, 'utf-8') + self.secret_key = secret_key + self.new = new + + def __repr__(self): + return '<%s %s%s>' % ( + self.__class__.__name__, + dict.__repr__(self), + self.should_save and '*' or '' + ) + + @property + def should_save(self): + """True if the session should be saved. By default this is only true + for :attr:`modified` cookies, not :attr:`new`. + """ + return self.modified + + @classmethod + def quote(cls, value): + """Quote the value for the cookie. This can be any object supported + by :attr:`serialization_method`. + + :param value: the value to quote. + """ + if cls.serialization_method is not None: + value = cls.serialization_method.dumps(value) + if cls.quote_base64: + value = b''.join(base64.b64encode(value).splitlines()).strip() + return value + + @classmethod + def unquote(cls, value): + """Unquote the value for the cookie. If unquoting does not work a + :exc:`UnquoteError` is raised. + + :param value: the value to unquote. + """ + try: + if cls.quote_base64: + value = base64.b64decode(value) + if cls.serialization_method is not None: + value = cls.serialization_method.loads(value) + return value + except Exception: + # unfortunately pickle and other serialization modules can + # cause pretty every error here. if we get one we catch it + # and convert it into an UnquoteError + raise UnquoteError() + + def serialize(self, expires=None): + """Serialize the secure cookie into a string. + + If expires is provided, the session will be automatically invalidated + after expiration when you unseralize it. This provides better + protection against session cookie theft. + + :param expires: an optional expiration date for the cookie (a + :class:`datetime.datetime` object) + """ + if self.secret_key is None: + raise RuntimeError('no secret key defined') + if expires: + self['_expires'] = _date_to_unix(expires) + result = [] + mac = hmac(self.secret_key, None, self.hash_method) + for key, value in sorted(self.items()): + result.append(('%s=%s' % ( + url_quote_plus(key), + self.quote(value).decode('ascii') + )).encode('ascii')) + mac.update(b'|' + result[-1]) + return b'?'.join([ + base64.b64encode(mac.digest()).strip(), + b'&'.join(result) + ]) + + @classmethod + def unserialize(cls, string, secret_key): + """Load the secure cookie from a serialized string. + + :param string: the cookie value to unserialize. + :param secret_key: the secret key used to serialize the cookie. + :return: a new :class:`SecureCookie`. 
+ """ + if isinstance(string, text_type): + string = string.encode('utf-8', 'replace') + if isinstance(secret_key, text_type): + secret_key = secret_key.encode('utf-8', 'replace') + try: + base64_hash, data = string.split(b'?', 1) + except (ValueError, IndexError): + items = () + else: + items = {} + mac = hmac(secret_key, None, cls.hash_method) + for item in data.split(b'&'): + mac.update(b'|' + item) + if b'=' not in item: + items = None + break + key, value = item.split(b'=', 1) + # try to make the key a string + key = url_unquote_plus(key.decode('ascii')) + try: + key = to_native(key) + except UnicodeError: + pass + items[key] = value + + # no parsing error and the mac looks okay, we can now + # sercurely unpickle our cookie. + try: + client_hash = base64.b64decode(base64_hash) + except TypeError: + items = client_hash = None + if items is not None and safe_str_cmp(client_hash, mac.digest()): + try: + for key, value in iteritems(items): + items[key] = cls.unquote(value) + except UnquoteError: + items = () + else: + if '_expires' in items: + if time() > items['_expires']: + items = () + else: + del items['_expires'] + else: + items = () + return cls(items, secret_key, False) + + @classmethod + def load_cookie(cls, request, key='session', secret_key=None): + """Loads a :class:`SecureCookie` from a cookie in request. If the + cookie is not set, a new :class:`SecureCookie` instanced is + returned. + + :param request: a request object that has a `cookies` attribute + which is a dict of all cookie values. + :param key: the name of the cookie. + :param secret_key: the secret key used to unquote the cookie. + Always provide the value even though it has + no default! + """ + data = request.cookies.get(key) + if not data: + return cls(secret_key=secret_key) + return cls.unserialize(data, secret_key) + + def save_cookie(self, response, key='session', expires=None, + session_expires=None, max_age=None, path='/', domain=None, + secure=None, httponly=False, force=False): + """Saves the SecureCookie in a cookie on response object. All + parameters that are not described here are forwarded directly + to :meth:`~BaseResponse.set_cookie`. + + :param response: a response object that has a + :meth:`~BaseResponse.set_cookie` method. + :param key: the name of the cookie. + :param session_expires: the expiration date of the secure cookie + stored information. If this is not provided + the cookie `expires` date is used instead. + """ + if force or self.should_save: + data = self.serialize(session_expires or expires) + response.set_cookie(key, data, expires=expires, max_age=max_age, + path=path, domain=domain, secure=secure, + httponly=httponly) diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/sessions.py b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/sessions.py new file mode 100644 index 0000000..a9c3aa7 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/sessions.py @@ -0,0 +1,352 @@ +# -*- coding: utf-8 -*- +r""" + werkzeug.contrib.sessions + ~~~~~~~~~~~~~~~~~~~~~~~~~ + + This module contains some helper classes that help one to add session + support to a python WSGI application. For full client-side session + storage see :mod:`~werkzeug.contrib.securecookie` which implements a + secure, client-side session storage. 
+ + + Application Integration + ======================= + + :: + + from werkzeug.contrib.sessions import SessionMiddleware, \ + FilesystemSessionStore + + app = SessionMiddleware(app, FilesystemSessionStore()) + + The current session will then appear in the WSGI environment as + `werkzeug.session`. However it's recommended to not use the middleware + but the stores directly in the application. However for very simple + scripts a middleware for sessions could be sufficient. + + This module does not implement methods or ways to check if a session is + expired. That should be done by a cronjob and storage specific. For + example to prune unused filesystem sessions one could check the modified + time of the files. If sessions are stored in the database the new() + method should add an expiration timestamp for the session. + + For better flexibility it's recommended to not use the middleware but the + store and session object directly in the application dispatching:: + + session_store = FilesystemSessionStore() + + def application(environ, start_response): + request = Request(environ) + sid = request.cookies.get('cookie_name') + if sid is None: + request.session = session_store.new() + else: + request.session = session_store.get(sid) + response = get_the_response_object(request) + if request.session.should_save: + session_store.save(request.session) + response.set_cookie('cookie_name', request.session.sid) + return response(environ, start_response) + + :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. + :license: BSD, see LICENSE for more details. +""" +import re +import os +import tempfile +from os import path +from time import time +from random import random +from hashlib import sha1 +from pickle import dump, load, HIGHEST_PROTOCOL + +from werkzeug.datastructures import CallbackDict +from werkzeug.utils import dump_cookie, parse_cookie +from werkzeug.wsgi import ClosingIterator +from werkzeug.posixemulation import rename +from werkzeug._compat import PY2, text_type +from werkzeug.filesystem import get_filesystem_encoding + + +_sha1_re = re.compile(r'^[a-f0-9]{40}$') + + +def _urandom(): + if hasattr(os, 'urandom'): + return os.urandom(30) + return text_type(random()).encode('ascii') + + +def generate_key(salt=None): + if salt is None: + salt = repr(salt).encode('ascii') + return sha1(b''.join([ + salt, + str(time()).encode('ascii'), + _urandom() + ])).hexdigest() + + +class ModificationTrackingDict(CallbackDict): + __slots__ = ('modified',) + + def __init__(self, *args, **kwargs): + def on_update(self): + self.modified = True + self.modified = False + CallbackDict.__init__(self, on_update=on_update) + dict.update(self, *args, **kwargs) + + def copy(self): + """Create a flat copy of the dict.""" + missing = object() + result = object.__new__(self.__class__) + for name in self.__slots__: + val = getattr(self, name, missing) + if val is not missing: + setattr(result, name, val) + return result + + def __copy__(self): + return self.copy() + + +class Session(ModificationTrackingDict): + + """Subclass of a dict that keeps track of direct object changes. Changes + in mutable structures are not tracked, for those you have to set + `modified` to `True` by hand. 
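+
+ A short sketch of that limitation (the sid value is a made-up
+ placeholder)::
+
+ session = Session({'visits': []}, sid='deadbeef')
+ session['visits'].append('/')  # in-place change, not tracked
+ session.modified = True        # mark the session dirty by hand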
+ """ + __slots__ = ModificationTrackingDict.__slots__ + ('sid', 'new') + + def __init__(self, data, sid, new=False): + ModificationTrackingDict.__init__(self, data) + self.sid = sid + self.new = new + + def __repr__(self): + return '<%s %s%s>' % ( + self.__class__.__name__, + dict.__repr__(self), + self.should_save and '*' or '' + ) + + @property + def should_save(self): + """True if the session should be saved. + + .. versionchanged:: 0.6 + By default the session is now only saved if the session is + modified, not if it is new like it was before. + """ + return self.modified + + +class SessionStore(object): + + """Baseclass for all session stores. The Werkzeug contrib module does not + implement any useful stores besides the filesystem store, application + developers are encouraged to create their own stores. + + :param session_class: The session class to use. Defaults to + :class:`Session`. + """ + + def __init__(self, session_class=None): + if session_class is None: + session_class = Session + self.session_class = session_class + + def is_valid_key(self, key): + """Check if a key has the correct format.""" + return _sha1_re.match(key) is not None + + def generate_key(self, salt=None): + """Simple function that generates a new session key.""" + return generate_key(salt) + + def new(self): + """Generate a new session.""" + return self.session_class({}, self.generate_key(), True) + + def save(self, session): + """Save a session.""" + + def save_if_modified(self, session): + """Save if a session class wants an update.""" + if session.should_save: + self.save(session) + + def delete(self, session): + """Delete a session.""" + + def get(self, sid): + """Get a session for this sid or a new session object. This method + has to check if the session key is valid and create a new session if + that wasn't the case. + """ + return self.session_class({}, sid, True) + + +#: used for temporary files by the filesystem session store +_fs_transaction_suffix = '.__wz_sess' + + +class FilesystemSessionStore(SessionStore): + + """Simple example session store that saves sessions on the filesystem. + This store works best on POSIX systems and Windows Vista / Windows + Server 2008 and newer. + + .. versionchanged:: 0.6 + `renew_missing` was added. Previously this was considered `True`, + now the default changed to `False` and it can be explicitly + deactivated. + + :param path: the path to the folder used for storing the sessions. + If not provided the default temporary directory is used. + :param filename_template: a string template used to give the session + a filename. ``%s`` is replaced with the + session id. + :param session_class: The session class to use. Defaults to + :class:`Session`. + :param renew_missing: set to `True` if you want the store to + give the user a new sid if the session was + not yet saved. 
+ """ + + def __init__(self, path=None, filename_template='werkzeug_%s.sess', + session_class=None, renew_missing=False, mode=0o644): + SessionStore.__init__(self, session_class) + if path is None: + path = tempfile.gettempdir() + self.path = path + if isinstance(filename_template, text_type) and PY2: + filename_template = filename_template.encode( + get_filesystem_encoding()) + assert not filename_template.endswith(_fs_transaction_suffix), \ + 'filename templates may not end with %s' % _fs_transaction_suffix + self.filename_template = filename_template + self.renew_missing = renew_missing + self.mode = mode + + def get_session_filename(self, sid): + # out of the box, this should be a strict ASCII subset but + # you might reconfigure the session object to have a more + # arbitrary string. + if isinstance(sid, text_type) and PY2: + sid = sid.encode(get_filesystem_encoding()) + return path.join(self.path, self.filename_template % sid) + + def save(self, session): + fn = self.get_session_filename(session.sid) + fd, tmp = tempfile.mkstemp(suffix=_fs_transaction_suffix, + dir=self.path) + f = os.fdopen(fd, 'wb') + try: + dump(dict(session), f, HIGHEST_PROTOCOL) + finally: + f.close() + try: + rename(tmp, fn) + os.chmod(fn, self.mode) + except (IOError, OSError): + pass + + def delete(self, session): + fn = self.get_session_filename(session.sid) + try: + os.unlink(fn) + except OSError: + pass + + def get(self, sid): + if not self.is_valid_key(sid): + return self.new() + try: + f = open(self.get_session_filename(sid), 'rb') + except IOError: + if self.renew_missing: + return self.new() + data = {} + else: + try: + try: + data = load(f) + except Exception: + data = {} + finally: + f.close() + return self.session_class(data, sid, False) + + def list(self): + """Lists all sessions in the store. + + .. versionadded:: 0.6 + """ + before, after = self.filename_template.split('%s', 1) + filename_re = re.compile(r'%s(.{5,})%s$' % (re.escape(before), + re.escape(after))) + result = [] + for filename in os.listdir(self.path): + #: this is a session that is still being saved. + if filename.endswith(_fs_transaction_suffix): + continue + match = filename_re.match(filename) + if match is not None: + result.append(match.group(1)) + return result + + +class SessionMiddleware(object): + + """A simple middleware that puts the session object of a store provided + into the WSGI environ. It automatically sets cookies and restores + sessions. + + However a middleware is not the preferred solution because it won't be as + fast as sessions managed by the application itself and will put a key into + the WSGI environment only relevant for the application which is against + the concept of WSGI. + + The cookie parameters are the same as for the :func:`~dump_cookie` + function just prefixed with ``cookie_``. Additionally `max_age` is + called `cookie_age` and not `cookie_max_age` because of backwards + compatibility. 
+ """ + + def __init__(self, app, store, cookie_name='session_id', + cookie_age=None, cookie_expires=None, cookie_path='/', + cookie_domain=None, cookie_secure=None, + cookie_httponly=False, environ_key='werkzeug.session'): + self.app = app + self.store = store + self.cookie_name = cookie_name + self.cookie_age = cookie_age + self.cookie_expires = cookie_expires + self.cookie_path = cookie_path + self.cookie_domain = cookie_domain + self.cookie_secure = cookie_secure + self.cookie_httponly = cookie_httponly + self.environ_key = environ_key + + def __call__(self, environ, start_response): + cookie = parse_cookie(environ.get('HTTP_COOKIE', '')) + sid = cookie.get(self.cookie_name, None) + if sid is None: + session = self.store.new() + else: + session = self.store.get(sid) + environ[self.environ_key] = session + + def injecting_start_response(status, headers, exc_info=None): + if session.should_save: + self.store.save(session) + headers.append(('Set-Cookie', dump_cookie(self.cookie_name, + session.sid, self.cookie_age, + self.cookie_expires, self.cookie_path, + self.cookie_domain, self.cookie_secure, + self.cookie_httponly))) + return start_response(status, headers, exc_info) + return ClosingIterator(self.app(environ, injecting_start_response), + lambda: self.store.save_if_modified(session)) diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/testtools.py b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/testtools.py new file mode 100644 index 0000000..3dab6db --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/testtools.py @@ -0,0 +1,73 @@ +# -*- coding: utf-8 -*- +""" + werkzeug.contrib.testtools + ~~~~~~~~~~~~~~~~~~~~~~~~~~ + + This module implements extended wrappers for simplified testing. + + `TestResponse` + A response wrapper which adds various cached attributes for + simplified assertions on various content types. + + :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. + :license: BSD, see LICENSE for more details. +""" +from werkzeug.utils import cached_property, import_string +from werkzeug.wrappers import Response + +from warnings import warn +warn(DeprecationWarning('werkzeug.contrib.testtools is deprecated and ' + 'will be removed with Werkzeug 1.0')) + + +class ContentAccessors(object): + + """ + A mixin class for response objects that provides a couple of useful + accessors for unittesting. 
+ """ + + def xml(self): + """Get an etree if possible.""" + if 'xml' not in self.mimetype: + raise AttributeError( + 'Not a XML response (Content-Type: %s)' + % self.mimetype) + for module in ['xml.etree.ElementTree', 'ElementTree', + 'elementtree.ElementTree']: + etree = import_string(module, silent=True) + if etree is not None: + return etree.XML(self.body) + raise RuntimeError('You must have ElementTree installed ' + 'to use TestResponse.xml') + xml = cached_property(xml) + + def lxml(self): + """Get an lxml etree if possible.""" + if ('html' not in self.mimetype and 'xml' not in self.mimetype): + raise AttributeError('Not an HTML/XML response') + from lxml import etree + try: + from lxml.html import fromstring + except ImportError: + fromstring = etree.HTML + if self.mimetype == 'text/html': + return fromstring(self.data) + return etree.XML(self.data) + lxml = cached_property(lxml) + + def json(self): + """Get the result of simplejson.loads if possible.""" + if 'json' not in self.mimetype: + raise AttributeError('Not a JSON response') + try: + from simplejson import loads + except ImportError: + from json import loads + return loads(self.data) + json = cached_property(json) + + +class TestResponse(Response, ContentAccessors): + + """Pass this to `werkzeug.test.Client` for easier unittesting.""" diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/wrappers.py b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/wrappers.py new file mode 100644 index 0000000..6c86452 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/werkzeug/contrib/wrappers.py @@ -0,0 +1,284 @@ +# -*- coding: utf-8 -*- +""" + werkzeug.contrib.wrappers + ~~~~~~~~~~~~~~~~~~~~~~~~~ + + Extra wrappers or mixins contributed by the community. These wrappers can + be mixed in into request objects to add extra functionality. + + Example:: + + from werkzeug.wrappers import Request as RequestBase + from werkzeug.contrib.wrappers import JSONRequestMixin + + class Request(RequestBase, JSONRequestMixin): + pass + + Afterwards this request object provides the extra functionality of the + :class:`JSONRequestMixin`. + + :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. + :license: BSD, see LICENSE for more details. +""" +import codecs +try: + from simplejson import loads +except ImportError: + from json import loads + +from werkzeug.exceptions import BadRequest +from werkzeug.utils import cached_property +from werkzeug.http import dump_options_header, parse_options_header +from werkzeug._compat import wsgi_decoding_dance + + +def is_known_charset(charset): + """Checks if the given charset is known to Python.""" + try: + codecs.lookup(charset) + except LookupError: + return False + return True + + +class JSONRequestMixin(object): + + """Add json method to a request object. This will parse the input data + through simplejson if possible. + + :exc:`~werkzeug.exceptions.BadRequest` will be raised if the content-type + is not json or if the data itself cannot be parsed as json. + """ + + @cached_property + def json(self): + """Get the result of simplejson.loads if possible.""" + if 'json' not in self.environ.get('CONTENT_TYPE', ''): + raise BadRequest('Not a JSON request') + try: + return loads(self.data.decode(self.charset, self.encoding_errors)) + except Exception: + raise BadRequest('Unable to read JSON request') + + +class ProtobufRequestMixin(object): + + """Add protobuf parsing method to a request object. This will parse the + input data through `protobuf`_ if possible. 
+ + :exc:`~werkzeug.exceptions.BadRequest` will be raised if the content-type + is not protobuf or if the data itself cannot be parsed property. + + .. _protobuf: http://code.google.com/p/protobuf/ + """ + + #: by default the :class:`ProtobufRequestMixin` will raise a + #: :exc:`~werkzeug.exceptions.BadRequest` if the object is not + #: initialized. You can bypass that check by setting this + #: attribute to `False`. + protobuf_check_initialization = True + + def parse_protobuf(self, proto_type): + """Parse the data into an instance of proto_type.""" + if 'protobuf' not in self.environ.get('CONTENT_TYPE', ''): + raise BadRequest('Not a Protobuf request') + + obj = proto_type() + try: + obj.ParseFromString(self.data) + except Exception: + raise BadRequest("Unable to parse Protobuf request") + + # Fail if not all required fields are set + if self.protobuf_check_initialization and not obj.IsInitialized(): + raise BadRequest("Partial Protobuf request") + + return obj + + +class RoutingArgsRequestMixin(object): + + """This request mixin adds support for the wsgiorg routing args + `specification`_. + + .. _specification: https://wsgi.readthedocs.io/en/latest/specifications/routing_args.html + """ + + def _get_routing_args(self): + return self.environ.get('wsgiorg.routing_args', (()))[0] + + def _set_routing_args(self, value): + if self.shallow: + raise RuntimeError('A shallow request tried to modify the WSGI ' + 'environment. If you really want to do that, ' + 'set `shallow` to False.') + self.environ['wsgiorg.routing_args'] = (value, self.routing_vars) + + routing_args = property(_get_routing_args, _set_routing_args, doc=''' + The positional URL arguments as `tuple`.''') + del _get_routing_args, _set_routing_args + + def _get_routing_vars(self): + rv = self.environ.get('wsgiorg.routing_args') + if rv is not None: + return rv[1] + rv = {} + if not self.shallow: + self.routing_vars = rv + return rv + + def _set_routing_vars(self, value): + if self.shallow: + raise RuntimeError('A shallow request tried to modify the WSGI ' + 'environment. If you really want to do that, ' + 'set `shallow` to False.') + self.environ['wsgiorg.routing_args'] = (self.routing_args, value) + + routing_vars = property(_get_routing_vars, _set_routing_vars, doc=''' + The keyword URL arguments as `dict`.''') + del _get_routing_vars, _set_routing_vars + + +class ReverseSlashBehaviorRequestMixin(object): + + """This mixin reverses the trailing slash behavior of :attr:`script_root` + and :attr:`path`. This makes it possible to use :func:`~urlparse.urljoin` + directly on the paths. + + Because it changes the behavior or :class:`Request` this class has to be + mixed in *before* the actual request class:: + + class MyRequest(ReverseSlashBehaviorRequestMixin, Request): + pass + + This example shows the differences (for an application mounted on + `/application` and the request going to `/application/foo/bar`): + + +---------------+-------------------+---------------------+ + | | normal behavior | reverse behavior | + +===============+===================+=====================+ + | `script_root` | ``/application`` | ``/application/`` | + +---------------+-------------------+---------------------+ + | `path` | ``/foo/bar`` | ``foo/bar`` | + +---------------+-------------------+---------------------+ + """ + + @cached_property + def path(self): + """Requested path as unicode. This works a bit like the regular path + info in the WSGI environment but will not include a leading slash. 
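+
+ For the request from the table above (``/application/foo/bar``) this
+ yields ``u'foo/bar'`` rather than ``/foo/bar``.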
+ """ + path = wsgi_decoding_dance(self.environ.get('PATH_INFO') or '', + self.charset, self.encoding_errors) + return path.lstrip('/') + + @cached_property + def script_root(self): + """The root path of the script includling a trailing slash.""" + path = wsgi_decoding_dance(self.environ.get('SCRIPT_NAME') or '', + self.charset, self.encoding_errors) + return path.rstrip('/') + '/' + + +class DynamicCharsetRequestMixin(object): + + """"If this mixin is mixed into a request class it will provide + a dynamic `charset` attribute. This means that if the charset is + transmitted in the content type headers it's used from there. + + Because it changes the behavior or :class:`Request` this class has + to be mixed in *before* the actual request class:: + + class MyRequest(DynamicCharsetRequestMixin, Request): + pass + + By default the request object assumes that the URL charset is the + same as the data charset. If the charset varies on each request + based on the transmitted data it's not a good idea to let the URLs + change based on that. Most browsers assume either utf-8 or latin1 + for the URLs if they have troubles figuring out. It's strongly + recommended to set the URL charset to utf-8:: + + class MyRequest(DynamicCharsetRequestMixin, Request): + url_charset = 'utf-8' + + .. versionadded:: 0.6 + """ + + #: the default charset that is assumed if the content type header + #: is missing or does not contain a charset parameter. The default + #: is latin1 which is what HTTP specifies as default charset. + #: You may however want to set this to utf-8 to better support + #: browsers that do not transmit a charset for incoming data. + default_charset = 'latin1' + + def unknown_charset(self, charset): + """Called if a charset was provided but is not supported by + the Python codecs module. By default latin1 is assumed then + to not lose any information, you may override this method to + change the behavior. + + :param charset: the charset that was not found. + :return: the replacement charset. + """ + return 'latin1' + + @cached_property + def charset(self): + """The charset from the content type.""" + header = self.environ.get('CONTENT_TYPE') + if header: + ct, options = parse_options_header(header) + charset = options.get('charset') + if charset: + if is_known_charset(charset): + return charset + return self.unknown_charset(charset) + return self.default_charset + + +class DynamicCharsetResponseMixin(object): + + """If this mixin is mixed into a response class it will provide + a dynamic `charset` attribute. This means that if the charset is + looked up and stored in the `Content-Type` header and updates + itself automatically. This also means a small performance hit but + can be useful if you're working with different charsets on + responses. + + Because the charset attribute is no a property at class-level, the + default value is stored in `default_charset`. + + Because it changes the behavior or :class:`Response` this class has + to be mixed in *before* the actual response class:: + + class MyResponse(DynamicCharsetResponseMixin, Response): + pass + + .. versionadded:: 0.6 + """ + + #: the default charset. 
+ default_charset = 'utf-8' + + def _get_charset(self): + header = self.headers.get('content-type') + if header: + charset = parse_options_header(header)[1].get('charset') + if charset: + return charset + return self.default_charset + + def _set_charset(self, charset): + header = self.headers.get('content-type') + ct, options = parse_options_header(header) + if not ct: + raise TypeError('Cannot set charset if Content-Type ' + 'header is missing.') + options['charset'] = charset + self.headers['Content-Type'] = dump_options_header(ct, options) + + charset = property(_get_charset, _set_charset, doc=""" + The charset for the response. It's stored inside the + Content-Type header as a parameter.""") + del _get_charset, _set_charset diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/datastructures.py b/flask/venv/lib/python3.6/site-packages/werkzeug/datastructures.py new file mode 100644 index 0000000..9624599 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/werkzeug/datastructures.py @@ -0,0 +1,2762 @@ +# -*- coding: utf-8 -*- +""" + werkzeug.datastructures + ~~~~~~~~~~~~~~~~~~~~~~~ + + This module provides mixins and classes with an immutable interface. + + :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. + :license: BSD, see LICENSE for more details. +""" +import re +import codecs +import mimetypes +from copy import deepcopy +from itertools import repeat +from collections import Container, Iterable, MutableSet + +from werkzeug._internal import _missing, _empty_stream +from werkzeug._compat import iterkeys, itervalues, iteritems, iterlists, \ + PY2, text_type, integer_types, string_types, make_literal_wrapper, \ + to_native +from werkzeug.filesystem import get_filesystem_encoding + + +_locale_delim_re = re.compile(r'[_-]') + + +def is_immutable(self): + raise TypeError('%r objects are immutable' % self.__class__.__name__) + + +def iter_multi_items(mapping): + """Iterates over the items of a mapping yielding keys and values + without dropping any from more complex structures. + """ + if isinstance(mapping, MultiDict): + for item in iteritems(mapping, multi=True): + yield item + elif isinstance(mapping, dict): + for key, value in iteritems(mapping): + if isinstance(value, (tuple, list)): + for value in value: + yield key, value + else: + yield key, value + else: + for item in mapping: + yield item + + +def native_itermethods(names): + if not PY2: + return lambda x: x + + def setviewmethod(cls, name): + viewmethod_name = 'view%s' % name + viewmethod = lambda self, *a, **kw: ViewItems(self, name, 'view_%s' % name, *a, **kw) + viewmethod.__doc__ = \ + '"""`%s()` object providing a view on %s"""' % (viewmethod_name, name) + setattr(cls, viewmethod_name, viewmethod) + + def setitermethod(cls, name): + itermethod = getattr(cls, name) + setattr(cls, 'iter%s' % name, itermethod) + listmethod = lambda self, *a, **kw: list(itermethod(self, *a, **kw)) + listmethod.__doc__ = \ + 'Like :py:meth:`iter%s`, but returns a list.' % name + setattr(cls, name, listmethod) + + def wrap(cls): + for name in names: + setitermethod(cls, name) + setviewmethod(cls, name) + return cls + return wrap + + +class ImmutableListMixin(object): + + """Makes a :class:`list` immutable. + + .. 
versionadded:: 0.5 + + :private: + """ + + _hash_cache = None + + def __hash__(self): + if self._hash_cache is not None: + return self._hash_cache + rv = self._hash_cache = hash(tuple(self)) + return rv + + def __reduce_ex__(self, protocol): + return type(self), (list(self),) + + def __delitem__(self, key): + is_immutable(self) + + def __iadd__(self, other): + is_immutable(self) + __imul__ = __iadd__ + + def __setitem__(self, key, value): + is_immutable(self) + + def append(self, item): + is_immutable(self) + remove = append + + def extend(self, iterable): + is_immutable(self) + + def insert(self, pos, value): + is_immutable(self) + + def pop(self, index=-1): + is_immutable(self) + + def reverse(self): + is_immutable(self) + + def sort(self, cmp=None, key=None, reverse=None): + is_immutable(self) + + +class ImmutableList(ImmutableListMixin, list): + + """An immutable :class:`list`. + + .. versionadded:: 0.5 + + :private: + """ + + def __repr__(self): + return '%s(%s)' % ( + self.__class__.__name__, + list.__repr__(self), + ) + + +class ImmutableDictMixin(object): + + """Makes a :class:`dict` immutable. + + .. versionadded:: 0.5 + + :private: + """ + _hash_cache = None + + @classmethod + def fromkeys(cls, keys, value=None): + instance = super(cls, cls).__new__(cls) + instance.__init__(zip(keys, repeat(value))) + return instance + + def __reduce_ex__(self, protocol): + return type(self), (dict(self),) + + def _iter_hashitems(self): + return iteritems(self) + + def __hash__(self): + if self._hash_cache is not None: + return self._hash_cache + rv = self._hash_cache = hash(frozenset(self._iter_hashitems())) + return rv + + def setdefault(self, key, default=None): + is_immutable(self) + + def update(self, *args, **kwargs): + is_immutable(self) + + def pop(self, key, default=None): + is_immutable(self) + + def popitem(self): + is_immutable(self) + + def __setitem__(self, key, value): + is_immutable(self) + + def __delitem__(self, key): + is_immutable(self) + + def clear(self): + is_immutable(self) + + +class ImmutableMultiDictMixin(ImmutableDictMixin): + + """Makes a :class:`MultiDict` immutable. + + .. versionadded:: 0.5 + + :private: + """ + + def __reduce_ex__(self, protocol): + return type(self), (list(iteritems(self, multi=True)),) + + def _iter_hashitems(self): + return iteritems(self, multi=True) + + def add(self, key, value): + is_immutable(self) + + def popitemlist(self): + is_immutable(self) + + def poplist(self, key): + is_immutable(self) + + def setlist(self, key, new_list): + is_immutable(self) + + def setlistdefault(self, key, default_list=None): + is_immutable(self) + + +class UpdateDictMixin(object): + + """Makes dicts call `self.on_update` on modifications. + + .. 
versionadded:: 0.5 + + :private: + """ + + on_update = None + + def calls_update(name): + def oncall(self, *args, **kw): + rv = getattr(super(UpdateDictMixin, self), name)(*args, **kw) + if self.on_update is not None: + self.on_update(self) + return rv + oncall.__name__ = name + return oncall + + def setdefault(self, key, default=None): + modified = key not in self + rv = super(UpdateDictMixin, self).setdefault(key, default) + if modified and self.on_update is not None: + self.on_update(self) + return rv + + def pop(self, key, default=_missing): + modified = key in self + if default is _missing: + rv = super(UpdateDictMixin, self).pop(key) + else: + rv = super(UpdateDictMixin, self).pop(key, default) + if modified and self.on_update is not None: + self.on_update(self) + return rv + + __setitem__ = calls_update('__setitem__') + __delitem__ = calls_update('__delitem__') + clear = calls_update('clear') + popitem = calls_update('popitem') + update = calls_update('update') + del calls_update + + +class TypeConversionDict(dict): + + """Works like a regular dict but the :meth:`get` method can perform + type conversions. :class:`MultiDict` and :class:`CombinedMultiDict` + are subclasses of this class and provide the same feature. + + .. versionadded:: 0.5 + """ + + def get(self, key, default=None, type=None): + """Return the default value if the requested data doesn't exist. + If `type` is provided and is a callable it should convert the value, + return it or raise a :exc:`ValueError` if that is not possible. In + this case the function will return the default as if the value was not + found: + + >>> d = TypeConversionDict(foo='42', bar='blub') + >>> d.get('foo', type=int) + 42 + >>> d.get('bar', -1, type=int) + -1 + + :param key: The key to be looked up. + :param default: The default value to be returned if the key can't + be looked up. If not further specified `None` is + returned. + :param type: A callable that is used to cast the value in the + :class:`MultiDict`. If a :exc:`ValueError` is raised + by this callable the default value is returned. + """ + try: + rv = self[key] + except KeyError: + return default + if type is not None: + try: + rv = type(rv) + except ValueError: + rv = default + return rv + + +class ImmutableTypeConversionDict(ImmutableDictMixin, TypeConversionDict): + + """Works like a :class:`TypeConversionDict` but does not support + modifications. + + .. versionadded:: 0.5 + """ + + def copy(self): + """Return a shallow mutable copy of this object. Keep in mind that + the standard library's :func:`copy` function is a no-op for this class + like for any other python immutable type (eg: :class:`tuple`). + """ + return TypeConversionDict(self) + + def __copy__(self): + return self + + +class ViewItems(object): + + def __init__(self, multi_dict, method, repr_name, *a, **kw): + self.__multi_dict = multi_dict + self.__method = method + self.__repr_name = repr_name + self.__a = a + self.__kw = kw + + def __get_items(self): + return getattr(self.__multi_dict, self.__method)(*self.__a, **self.__kw) + + def __repr__(self): + return '%s(%r)' % (self.__repr_name, list(self.__get_items())) + + def __iter__(self): + return iter(self.__get_items()) + + +@native_itermethods(['keys', 'values', 'items', 'lists', 'listvalues']) +class MultiDict(TypeConversionDict): + + """A :class:`MultiDict` is a dictionary subclass customized to deal with + multiple values for the same key which is for example used by the parsing + functions in the wrappers. 
This is necessary because some HTML form + elements pass multiple values for the same key. + + :class:`MultiDict` implements all standard dictionary methods. + Internally, it saves all values for a key as a list, but the standard dict + access methods will only return the first value for a key. If you want to + gain access to the other values, too, you have to use the `list` methods as + explained below. + + Basic Usage: + + >>> d = MultiDict([('a', 'b'), ('a', 'c')]) + >>> d + MultiDict([('a', 'b'), ('a', 'c')]) + >>> d['a'] + 'b' + >>> d.getlist('a') + ['b', 'c'] + >>> 'a' in d + True + + It behaves like a normal dict thus all dict functions will only return the + first value when multiple values for one key are found. + + From Werkzeug 0.3 onwards, the `KeyError` raised by this class is also a + subclass of the :exc:`~exceptions.BadRequest` HTTP exception and will + render a page for a ``400 BAD REQUEST`` if caught in a catch-all for HTTP + exceptions. + + A :class:`MultiDict` can be constructed from an iterable of + ``(key, value)`` tuples, a dict, a :class:`MultiDict` or from Werkzeug 0.2 + onwards some keyword parameters. + + :param mapping: the initial value for the :class:`MultiDict`. Either a + regular dict, an iterable of ``(key, value)`` tuples + or `None`. + """ + + def __init__(self, mapping=None): + if isinstance(mapping, MultiDict): + dict.__init__(self, ((k, l[:]) for k, l in iterlists(mapping))) + elif isinstance(mapping, dict): + tmp = {} + for key, value in iteritems(mapping): + if isinstance(value, (tuple, list)): + if len(value) == 0: + continue + value = list(value) + else: + value = [value] + tmp[key] = value + dict.__init__(self, tmp) + else: + tmp = {} + for key, value in mapping or (): + tmp.setdefault(key, []).append(value) + dict.__init__(self, tmp) + + def __getstate__(self): + return dict(self.lists()) + + def __setstate__(self, value): + dict.clear(self) + dict.update(self, value) + + def __getitem__(self, key): + """Return the first data value for this key; + raises KeyError if not found. + + :param key: The key to be looked up. + :raise KeyError: if the key does not exist. + """ + if key in self: + lst = dict.__getitem__(self, key) + if len(lst) > 0: + return lst[0] + raise exceptions.BadRequestKeyError(key) + + def __setitem__(self, key, value): + """Like :meth:`add` but removes an existing key first. + + :param key: the key for the value. + :param value: the value to set. + """ + dict.__setitem__(self, key, [value]) + + def add(self, key, value): + """Adds a new value for the key. + + .. versionadded:: 0.6 + + :param key: the key for the value. + :param value: the value to add. + """ + dict.setdefault(self, key, []).append(value) + + def getlist(self, key, type=None): + """Return the list of items for a given key. If that key is not in the + `MultiDict`, the return value will be an empty list. Just as `get` + `getlist` accepts a `type` parameter. All items will be converted + with the callable defined there. + + :param key: The key to be looked up. + :param type: A callable that is used to cast the value in the + :class:`MultiDict`. If a :exc:`ValueError` is raised + by this callable the value will be removed from the list. + :return: a :class:`list` of all the values for the key. 
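+
+ Values that cannot be converted are silently skipped:
+
+ >>> d = MultiDict([('a', '1'), ('a', 'x')])
+ >>> d.getlist('a', type=int)
+ [1]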
+ """ + try: + rv = dict.__getitem__(self, key) + except KeyError: + return [] + if type is None: + return list(rv) + result = [] + for item in rv: + try: + result.append(type(item)) + except ValueError: + pass + return result + + def setlist(self, key, new_list): + """Remove the old values for a key and add new ones. Note that the list + you pass the values in will be shallow-copied before it is inserted in + the dictionary. + + >>> d = MultiDict() + >>> d.setlist('foo', ['1', '2']) + >>> d['foo'] + '1' + >>> d.getlist('foo') + ['1', '2'] + + :param key: The key for which the values are set. + :param new_list: An iterable with the new values for the key. Old values + are removed first. + """ + dict.__setitem__(self, key, list(new_list)) + + def setdefault(self, key, default=None): + """Returns the value for the key if it is in the dict, otherwise it + returns `default` and sets that value for `key`. + + :param key: The key to be looked up. + :param default: The default value to be returned if the key is not + in the dict. If not further specified it's `None`. + """ + if key not in self: + self[key] = default + else: + default = self[key] + return default + + def setlistdefault(self, key, default_list=None): + """Like `setdefault` but sets multiple values. The list returned + is not a copy, but the list that is actually used internally. This + means that you can put new values into the dict by appending items + to the list: + + >>> d = MultiDict({"foo": 1}) + >>> d.setlistdefault("foo").extend([2, 3]) + >>> d.getlist("foo") + [1, 2, 3] + + :param key: The key to be looked up. + :param default_list: An iterable of default values. It is either copied + (in case it was a list) or converted into a list + before returned. + :return: a :class:`list` + """ + if key not in self: + default_list = list(default_list or ()) + dict.__setitem__(self, key, default_list) + else: + default_list = dict.__getitem__(self, key) + return default_list + + def items(self, multi=False): + """Return an iterator of ``(key, value)`` pairs. + + :param multi: If set to `True` the iterator returned will have a pair + for each value of each key. Otherwise it will only + contain pairs for the first value of each key. + """ + + for key, values in iteritems(dict, self): + if multi: + for value in values: + yield key, value + else: + yield key, values[0] + + def lists(self): + """Return a list of ``(key, values)`` pairs, where values is the list + of all values associated with the key.""" + + for key, values in iteritems(dict, self): + yield key, list(values) + + def keys(self): + return iterkeys(dict, self) + + __iter__ = keys + + def values(self): + """Returns an iterator of the first value on every key's value list.""" + for values in itervalues(dict, self): + yield values[0] + + def listvalues(self): + """Return an iterator of all values associated with a key. Zipping + :meth:`keys` and this is the same as calling :meth:`lists`: + + >>> d = MultiDict({"foo": [1, 2, 3]}) + >>> zip(d.keys(), d.listvalues()) == d.lists() + True + """ + + return itervalues(dict, self) + + def copy(self): + """Return a shallow copy of this object.""" + return self.__class__(self) + + def deepcopy(self, memo=None): + """Return a deep copy of this object.""" + return self.__class__(deepcopy(self.to_dict(flat=False), memo)) + + def to_dict(self, flat=True): + """Return the contents as regular dict. If `flat` is `True` the + returned dict will only have the first item present, if `flat` is + `False` all values will be returned as lists. 
+ + :param flat: If set to `False` the dict returned will have lists + with all the values in it. Otherwise it will only + contain the first value for each key. + :return: a :class:`dict` + """ + if flat: + return dict(iteritems(self)) + return dict(self.lists()) + + def update(self, other_dict): + """update() extends rather than replaces existing key lists: + + >>> a = MultiDict({'x': 1}) + >>> b = MultiDict({'x': 2, 'y': 3}) + >>> a.update(b) + >>> a + MultiDict([('y', 3), ('x', 1), ('x', 2)]) + + If the value list for a key in ``other_dict`` is empty, no new values + will be added to the dict and the key will not be created: + + >>> x = {'empty_list': []} + >>> y = MultiDict() + >>> y.update(x) + >>> y + MultiDict([]) + """ + for key, value in iter_multi_items(other_dict): + MultiDict.add(self, key, value) + + def pop(self, key, default=_missing): + """Pop the first item for a list on the dict. Afterwards the + key is removed from the dict, so additional values are discarded: + + >>> d = MultiDict({"foo": [1, 2, 3]}) + >>> d.pop("foo") + 1 + >>> "foo" in d + False + + :param key: the key to pop. + :param default: if provided the value to return if the key was + not in the dictionary. + """ + try: + lst = dict.pop(self, key) + + if len(lst) == 0: + raise exceptions.BadRequestKeyError() + + return lst[0] + except KeyError as e: + if default is not _missing: + return default + raise exceptions.BadRequestKeyError(str(e)) + + def popitem(self): + """Pop an item from the dict.""" + try: + item = dict.popitem(self) + + if len(item[1]) == 0: + raise exceptions.BadRequestKeyError() + + return (item[0], item[1][0]) + except KeyError as e: + raise exceptions.BadRequestKeyError(str(e)) + + def poplist(self, key): + """Pop the list for a key from the dict. If the key is not in the dict + an empty list is returned. + + .. versionchanged:: 0.5 + If the key does no longer exist a list is returned instead of + raising an error. + """ + return dict.pop(self, key, []) + + def popitemlist(self): + """Pop a ``(key, list)`` tuple from the dict.""" + try: + return dict.popitem(self) + except KeyError as e: + raise exceptions.BadRequestKeyError(str(e)) + + def __copy__(self): + return self.copy() + + def __deepcopy__(self, memo): + return self.deepcopy(memo=memo) + + def __repr__(self): + return '%s(%r)' % (self.__class__.__name__, list(iteritems(self, multi=True))) + + +class _omd_bucket(object): + + """Wraps values in the :class:`OrderedMultiDict`. This makes it + possible to keep an order over multiple different keys. It requires + a lot of extra memory and slows down access a lot, but makes it + possible to access elements in O(1) and iterate in O(n). + """ + __slots__ = ('prev', 'key', 'value', 'next') + + def __init__(self, omd, key, value): + self.prev = omd._last_bucket + self.key = key + self.value = value + self.next = None + + if omd._first_bucket is None: + omd._first_bucket = self + if omd._last_bucket is not None: + omd._last_bucket.next = self + omd._last_bucket = self + + def unlink(self, omd): + if self.prev: + self.prev.next = self.next + if self.next: + self.next.prev = self.prev + if omd._first_bucket is self: + omd._first_bucket = self.next + if omd._last_bucket is self: + omd._last_bucket = self.prev + + +@native_itermethods(['keys', 'values', 'items', 'lists', 'listvalues']) +class OrderedMultiDict(MultiDict): + + """Works like a regular :class:`MultiDict` but preserves the + order of the fields. 
To convert the ordered multi dict into a + list you can use the :meth:`items` method and pass it ``multi=True``. + + In general an :class:`OrderedMultiDict` is an order of magnitude + slower than a :class:`MultiDict`. + + .. admonition:: note + + Due to a limitation in Python you cannot convert an ordered + multi dict into a regular dict by using ``dict(multidict)``. + Instead you have to use the :meth:`to_dict` method, otherwise + the internal bucket objects are exposed. + """ + + def __init__(self, mapping=None): + dict.__init__(self) + self._first_bucket = self._last_bucket = None + if mapping is not None: + OrderedMultiDict.update(self, mapping) + + def __eq__(self, other): + if not isinstance(other, MultiDict): + return NotImplemented + if isinstance(other, OrderedMultiDict): + iter1 = iteritems(self, multi=True) + iter2 = iteritems(other, multi=True) + try: + for k1, v1 in iter1: + k2, v2 = next(iter2) + if k1 != k2 or v1 != v2: + return False + except StopIteration: + return False + try: + next(iter2) + except StopIteration: + return True + return False + if len(self) != len(other): + return False + for key, values in iterlists(self): + if other.getlist(key) != values: + return False + return True + + __hash__ = None + + def __ne__(self, other): + return not self.__eq__(other) + + def __reduce_ex__(self, protocol): + return type(self), (list(iteritems(self, multi=True)),) + + def __getstate__(self): + return list(iteritems(self, multi=True)) + + def __setstate__(self, values): + dict.clear(self) + for key, value in values: + self.add(key, value) + + def __getitem__(self, key): + if key in self: + return dict.__getitem__(self, key)[0].value + raise exceptions.BadRequestKeyError(key) + + def __setitem__(self, key, value): + self.poplist(key) + self.add(key, value) + + def __delitem__(self, key): + self.pop(key) + + def keys(self): + return (key for key, value in iteritems(self)) + + __iter__ = keys + + def values(self): + return (value for key, value in iteritems(self)) + + def items(self, multi=False): + ptr = self._first_bucket + if multi: + while ptr is not None: + yield ptr.key, ptr.value + ptr = ptr.next + else: + returned_keys = set() + while ptr is not None: + if ptr.key not in returned_keys: + returned_keys.add(ptr.key) + yield ptr.key, ptr.value + ptr = ptr.next + + def lists(self): + returned_keys = set() + ptr = self._first_bucket + while ptr is not None: + if ptr.key not in returned_keys: + yield ptr.key, self.getlist(ptr.key) + returned_keys.add(ptr.key) + ptr = ptr.next + + def listvalues(self): + for key, values in iterlists(self): + yield values + + def add(self, key, value): + dict.setdefault(self, key, []).append(_omd_bucket(self, key, value)) + + def getlist(self, key, type=None): + try: + rv = dict.__getitem__(self, key) + except KeyError: + return [] + if type is None: + return [x.value for x in rv] + result = [] + for item in rv: + try: + result.append(type(item.value)) + except ValueError: + pass + return result + + def setlist(self, key, new_list): + self.poplist(key) + for value in new_list: + self.add(key, value) + + def setlistdefault(self, key, default_list=None): + raise TypeError('setlistdefault is unsupported for ' + 'ordered multi dicts') + + def update(self, mapping): + for key, value in iter_multi_items(mapping): + OrderedMultiDict.add(self, key, value) + + def poplist(self, key): + buckets = dict.pop(self, key, ()) + for bucket in buckets: + bucket.unlink(self) + return [x.value for x in buckets] + + def pop(self, key, default=_missing): + try: + 
buckets = dict.pop(self, key) + except KeyError as e: + if default is not _missing: + return default + raise exceptions.BadRequestKeyError(str(e)) + for bucket in buckets: + bucket.unlink(self) + return buckets[0].value + + def popitem(self): + try: + key, buckets = dict.popitem(self) + except KeyError as e: + raise exceptions.BadRequestKeyError(str(e)) + for bucket in buckets: + bucket.unlink(self) + return key, buckets[0].value + + def popitemlist(self): + try: + key, buckets = dict.popitem(self) + except KeyError as e: + raise exceptions.BadRequestKeyError(str(e)) + for bucket in buckets: + bucket.unlink(self) + return key, [x.value for x in buckets] + + +def _options_header_vkw(value, kw): + return dump_options_header(value, dict((k.replace('_', '-'), v) + for k, v in kw.items())) + + +def _unicodify_header_value(value): + if isinstance(value, bytes): + value = value.decode('latin-1') + if not isinstance(value, text_type): + value = text_type(value) + return value + + +@native_itermethods(['keys', 'values', 'items']) +class Headers(object): + + """An object that stores some headers. It has a dict-like interface + but is ordered and can store the same keys multiple times. + + This data structure is useful if you want a nicer way to handle WSGI + headers which are stored as tuples in a list. + + From Werkzeug 0.3 onwards, the :exc:`KeyError` raised by this class is + also a subclass of the :class:`~exceptions.BadRequest` HTTP exception + and will render a page for a ``400 BAD REQUEST`` if caught in a + catch-all for HTTP exceptions. + + Headers is mostly compatible with the Python :class:`wsgiref.headers.Headers` + class, with the exception of `__getitem__`. :mod:`wsgiref` will return + `None` for ``headers['missing']``, whereas :class:`Headers` will raise + a :class:`KeyError`. + + To create a new :class:`Headers` object pass it a list or dict of headers + which are used as default values. This does not reuse the list passed + to the constructor for internal usage. + + :param defaults: The list of default values for the :class:`Headers`. + + .. versionchanged:: 0.9 + This data structure now stores unicode values similar to how the + multi dicts do it. The main difference is that bytes can be set as + well which will automatically be latin1 decoded. + + .. versionchanged:: 0.9 + The :meth:`linked` function was removed without replacement as it + was an API that does not support the changes to the encoding model. + """ + + def __init__(self, defaults=None): + self._list = [] + if defaults is not None: + if isinstance(defaults, (list, Headers)): + self._list.extend(defaults) + else: + self.extend(defaults) + + def __getitem__(self, key, _get_mode=False): + if not _get_mode: + if isinstance(key, integer_types): + return self._list[key] + elif isinstance(key, slice): + return self.__class__(self._list[key]) + if not isinstance(key, string_types): + raise exceptions.BadRequestKeyError(key) + ikey = key.lower() + for k, v in self._list: + if k.lower() == ikey: + return v + # micro optimization: if we are in get mode we will catch that + # exception one stack level down so we can raise a standard + # key error instead of our special one. 
+ if _get_mode: + raise KeyError() + raise exceptions.BadRequestKeyError(key) + + def __eq__(self, other): + return other.__class__ is self.__class__ and \ + set(other._list) == set(self._list) + + __hash__ = None + + def __ne__(self, other): + return not self.__eq__(other) + + def get(self, key, default=None, type=None, as_bytes=False): + """Return the default value if the requested data doesn't exist. + If `type` is provided and is a callable it should convert the value, + return it or raise a :exc:`ValueError` if that is not possible. In + this case the function will return the default as if the value was not + found: + + >>> d = Headers([('Content-Length', '42')]) + >>> d.get('Content-Length', type=int) + 42 + + If a headers object is bound you must not add unicode strings + because no encoding takes place. + + .. versionadded:: 0.9 + Added support for `as_bytes`. + + :param key: The key to be looked up. + :param default: The default value to be returned if the key can't + be looked up. If not further specified `None` is + returned. + :param type: A callable that is used to cast the value in the + :class:`Headers`. If a :exc:`ValueError` is raised + by this callable the default value is returned. + :param as_bytes: return bytes instead of unicode strings. + """ + try: + rv = self.__getitem__(key, _get_mode=True) + except KeyError: + return default + if as_bytes: + rv = rv.encode('latin1') + if type is None: + return rv + try: + return type(rv) + except ValueError: + return default + + def getlist(self, key, type=None, as_bytes=False): + """Return the list of items for a given key. If that key is not in the + :class:`Headers`, the return value will be an empty list. Just as + :meth:`get` :meth:`getlist` accepts a `type` parameter. All items will + be converted with the callable defined there. + + .. versionadded:: 0.9 + Added support for `as_bytes`. + + :param key: The key to be looked up. + :param type: A callable that is used to cast the value in the + :class:`Headers`. If a :exc:`ValueError` is raised + by this callable the value will be removed from the list. + :return: a :class:`list` of all the values for the key. + :param as_bytes: return bytes instead of unicode strings. + """ + ikey = key.lower() + result = [] + for k, v in self: + if k.lower() == ikey: + if as_bytes: + v = v.encode('latin1') + if type is not None: + try: + v = type(v) + except ValueError: + continue + result.append(v) + return result + + def get_all(self, name): + """Return a list of all the values for the named field. + + This method is compatible with the :mod:`wsgiref` + :meth:`~wsgiref.headers.Headers.get_all` method. + """ + return self.getlist(name) + + def items(self, lower=False): + for key, value in self: + if lower: + key = key.lower() + yield key, value + + def keys(self, lower=False): + for key, _ in iteritems(self, lower): + yield key + + def values(self): + for _, value in iteritems(self): + yield value + + def extend(self, iterable): + """Extend the headers with a dict or an iterable yielding keys and + values. 
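+
+ List or tuple values add one header per item:
+
+ >>> h = Headers()
+ >>> h.extend({'Vary': ['Accept', 'Cookie']})
+ >>> len(h)
+ 2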
+ """ + if isinstance(iterable, dict): + for key, value in iteritems(iterable): + if isinstance(value, (tuple, list)): + for v in value: + self.add(key, v) + else: + self.add(key, value) + else: + for key, value in iterable: + self.add(key, value) + + def __delitem__(self, key, _index_operation=True): + if _index_operation and isinstance(key, (integer_types, slice)): + del self._list[key] + return + key = key.lower() + new = [] + for k, v in self._list: + if k.lower() != key: + new.append((k, v)) + self._list[:] = new + + def remove(self, key): + """Remove a key. + + :param key: The key to be removed. + """ + return self.__delitem__(key, _index_operation=False) + + def pop(self, key=None, default=_missing): + """Removes and returns a key or index. + + :param key: The key to be popped. If this is an integer the item at + that position is removed, if it's a string the value for + that key is. If the key is omitted or `None` the last + item is removed. + :return: an item. + """ + if key is None: + return self._list.pop() + if isinstance(key, integer_types): + return self._list.pop(key) + try: + rv = self[key] + self.remove(key) + except KeyError: + if default is not _missing: + return default + raise + return rv + + def popitem(self): + """Removes a key or index and returns a (key, value) item.""" + return self.pop() + + def __contains__(self, key): + """Check if a key is present.""" + try: + self.__getitem__(key, _get_mode=True) + except KeyError: + return False + return True + + has_key = __contains__ + + def __iter__(self): + """Yield ``(key, value)`` tuples.""" + return iter(self._list) + + def __len__(self): + return len(self._list) + + def add(self, _key, _value, **kw): + """Add a new header tuple to the list. + + Keyword arguments can specify additional parameters for the header + value, with underscores converted to dashes:: + + >>> d = Headers() + >>> d.add('Content-Type', 'text/plain') + >>> d.add('Content-Disposition', 'attachment', filename='foo.png') + + The keyword argument dumping uses :func:`dump_options_header` + behind the scenes. + + .. versionadded:: 0.4.1 + keyword arguments were added for :mod:`wsgiref` compatibility. + """ + if kw: + _value = _options_header_vkw(_value, kw) + _value = _unicodify_header_value(_value) + self._validate_value(_value) + self._list.append((_key, _value)) + + def _validate_value(self, value): + if not isinstance(value, text_type): + raise TypeError('Value should be unicode.') + if u'\n' in value or u'\r' in value: + raise ValueError('Detected newline in header value. This is ' + 'a potential security problem') + + def add_header(self, _key, _value, **_kw): + """Add a new header tuple to the list. + + An alias for :meth:`add` for compatibility with the :mod:`wsgiref` + :meth:`~wsgiref.headers.Headers.add_header` method. + """ + self.add(_key, _value, **_kw) + + def clear(self): + """Clears all headers.""" + del self._list[:] + + def set(self, _key, _value, **kw): + """Remove all header tuples for `key` and add a new one. The newly + added key either appears at the end of the list if there was no + entry or replaces the first one. + + Keyword arguments can specify additional parameters for the header + value, with underscores converted to dashes. See :meth:`add` for + more information. + + .. versionchanged:: 0.6.1 + :meth:`set` now accepts the same arguments as :meth:`add`. + + :param key: The key to be inserted. + :param value: The value to be inserted. 
+ """ + if kw: + _value = _options_header_vkw(_value, kw) + _value = _unicodify_header_value(_value) + self._validate_value(_value) + if not self._list: + self._list.append((_key, _value)) + return + listiter = iter(self._list) + ikey = _key.lower() + for idx, (old_key, old_value) in enumerate(listiter): + if old_key.lower() == ikey: + # replace first ocurrence + self._list[idx] = (_key, _value) + break + else: + self._list.append((_key, _value)) + return + self._list[idx + 1:] = [t for t in listiter if t[0].lower() != ikey] + + def setdefault(self, key, default): + """Returns the value for the key if it is in the dict, otherwise it + returns `default` and sets that value for `key`. + + :param key: The key to be looked up. + :param default: The default value to be returned if the key is not + in the dict. If not further specified it's `None`. + """ + if key in self: + return self[key] + self.set(key, default) + return default + + def __setitem__(self, key, value): + """Like :meth:`set` but also supports index/slice based setting.""" + if isinstance(key, (slice, integer_types)): + if isinstance(key, integer_types): + value = [value] + value = [(k, _unicodify_header_value(v)) for (k, v) in value] + [self._validate_value(v) for (k, v) in value] + if isinstance(key, integer_types): + self._list[key] = value[0] + else: + self._list[key] = value + else: + self.set(key, value) + + def to_list(self, charset='iso-8859-1'): + """Convert the headers into a list suitable for WSGI.""" + from warnings import warn + warn(DeprecationWarning('Method removed, use to_wsgi_list instead'), + stacklevel=2) + return self.to_wsgi_list() + + def to_wsgi_list(self): + """Convert the headers into a list suitable for WSGI. + + The values are byte strings in Python 2 converted to latin1 and unicode + strings in Python 3 for the WSGI server to encode. + + :return: list + """ + if PY2: + return [(to_native(k), v.encode('latin1')) for k, v in self] + return list(self) + + def copy(self): + return self.__class__(self._list) + + def __copy__(self): + return self.copy() + + def __str__(self): + """Returns formatted headers suitable for HTTP transmission.""" + strs = [] + for key, value in self.to_wsgi_list(): + strs.append('%s: %s' % (key, value)) + strs.append('\r\n') + return '\r\n'.join(strs) + + def __repr__(self): + return '%s(%r)' % ( + self.__class__.__name__, + list(self) + ) + + +class ImmutableHeadersMixin(object): + + """Makes a :class:`Headers` immutable. We do not mark them as + hashable though since the only usecase for this datastructure + in Werkzeug is a view on a mutable structure. + + .. versionadded:: 0.5 + + :private: + """ + + def __delitem__(self, key): + is_immutable(self) + + def __setitem__(self, key, value): + is_immutable(self) + set = __setitem__ + + def add(self, item): + is_immutable(self) + remove = add_header = add + + def extend(self, iterable): + is_immutable(self) + + def insert(self, pos, value): + is_immutable(self) + + def pop(self, index=-1): + is_immutable(self) + + def popitem(self): + is_immutable(self) + + def setdefault(self, key, default): + is_immutable(self) + + +class EnvironHeaders(ImmutableHeadersMixin, Headers): + + """Read only version of the headers from a WSGI environment. This + provides the same interface as `Headers` and is constructed from + a WSGI environment. 
+ + From Werkzeug 0.3 onwards, the `KeyError` raised by this class is also a + subclass of the :exc:`~exceptions.BadRequest` HTTP exception and will + render a page for a ``400 BAD REQUEST`` if caught in a catch-all for + HTTP exceptions. + """ + + def __init__(self, environ): + self.environ = environ + + def __eq__(self, other): + return self.environ is other.environ + + __hash__ = None + + def __getitem__(self, key, _get_mode=False): + # _get_mode is a no-op for this class as there is no index but + # used because get() calls it. + if not isinstance(key, string_types): + raise KeyError(key) + key = key.upper().replace('-', '_') + if key in ('CONTENT_TYPE', 'CONTENT_LENGTH'): + return _unicodify_header_value(self.environ[key]) + return _unicodify_header_value(self.environ['HTTP_' + key]) + + def __len__(self): + # the iter is necessary because otherwise list calls our + # len which would call list again and so forth. + return len(list(iter(self))) + + def __iter__(self): + for key, value in iteritems(self.environ): + if key.startswith('HTTP_') and key not in \ + ('HTTP_CONTENT_TYPE', 'HTTP_CONTENT_LENGTH'): + yield (key[5:].replace('_', '-').title(), + _unicodify_header_value(value)) + elif key in ('CONTENT_TYPE', 'CONTENT_LENGTH') and value: + yield (key.replace('_', '-').title(), + _unicodify_header_value(value)) + + def copy(self): + raise TypeError('cannot create %r copies' % self.__class__.__name__) + + +@native_itermethods(['keys', 'values', 'items', 'lists', 'listvalues']) +class CombinedMultiDict(ImmutableMultiDictMixin, MultiDict): + + """A read only :class:`MultiDict` that you can pass multiple :class:`MultiDict` + instances as sequence and it will combine the return values of all wrapped + dicts: + + >>> from werkzeug.datastructures import CombinedMultiDict, MultiDict + >>> post = MultiDict([('foo', 'bar')]) + >>> get = MultiDict([('blub', 'blah')]) + >>> combined = CombinedMultiDict([get, post]) + >>> combined['foo'] + 'bar' + >>> combined['blub'] + 'blah' + + This works for all read operations and will raise a `TypeError` for + methods that usually change data which isn't possible. + + From Werkzeug 0.3 onwards, the `KeyError` raised by this class is also a + subclass of the :exc:`~exceptions.BadRequest` HTTP exception and will + render a page for a ``400 BAD REQUEST`` if caught in a catch-all for HTTP + exceptions. + """ + + def __reduce_ex__(self, protocol): + return type(self), (self.dicts,) + + def __init__(self, dicts=None): + self.dicts = dicts or [] + + @classmethod + def fromkeys(cls): + raise TypeError('cannot create %r instances by fromkeys' % + cls.__name__) + + def __getitem__(self, key): + for d in self.dicts: + if key in d: + return d[key] + raise exceptions.BadRequestKeyError(key) + + def get(self, key, default=None, type=None): + for d in self.dicts: + if key in d: + if type is not None: + try: + return type(d[key]) + except ValueError: + continue + return d[key] + return default + + def getlist(self, key, type=None): + rv = [] + for d in self.dicts: + rv.extend(d.getlist(key, type)) + return rv + + def _keys_impl(self): + """This function exists so __len__ can be implemented more efficiently, + saving one list creation from an iterator. + + Using this for Python 2's ``dict.keys`` behavior would be useless since + `dict.keys` in Python 2 returns a list, while we have a set here. 
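To make the lookup order and the `type` coercion described for `get()` above concrete, a small sketch (names are illustrative; a failed conversion falls through to the next dict or the default):

    from werkzeug.datastructures import CombinedMultiDict, MultiDict

    args = MultiDict([('page', '2'), ('q', 'flask')])
    form = MultiDict([('q', 'ignored'), ('limit', 'not-a-number')])
    combined = CombinedMultiDict([args, form])

    print(combined['q'])                       # 'flask' -- the first dict wins
    print(combined.get('page', type=int))      # 2, coerced by the type callable
    print(combined.get('limit', 0, type=int))  # ValueError is swallowed -> 0
    print(combined.getlist('q'))               # ['flask', 'ignored'] from all dicts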
+ """ + rv = set() + for d in self.dicts: + rv.update(iterkeys(d)) + return rv + + def keys(self): + return iter(self._keys_impl()) + + __iter__ = keys + + def items(self, multi=False): + found = set() + for d in self.dicts: + for key, value in iteritems(d, multi): + if multi: + yield key, value + elif key not in found: + found.add(key) + yield key, value + + def values(self): + for key, value in iteritems(self): + yield value + + def lists(self): + rv = {} + for d in self.dicts: + for key, values in iterlists(d): + rv.setdefault(key, []).extend(values) + return iteritems(rv) + + def listvalues(self): + return (x[1] for x in self.lists()) + + def copy(self): + """Return a shallow copy of this object.""" + return self.__class__(self.dicts[:]) + + def to_dict(self, flat=True): + """Return the contents as regular dict. If `flat` is `True` the + returned dict will only have the first item present, if `flat` is + `False` all values will be returned as lists. + + :param flat: If set to `False` the dict returned will have lists + with all the values in it. Otherwise it will only + contain the first item for each key. + :return: a :class:`dict` + """ + rv = {} + for d in reversed(self.dicts): + rv.update(d.to_dict(flat)) + return rv + + def __len__(self): + return len(self._keys_impl()) + + def __contains__(self, key): + for d in self.dicts: + if key in d: + return True + return False + + has_key = __contains__ + + def __repr__(self): + return '%s(%r)' % (self.__class__.__name__, self.dicts) + + +class FileMultiDict(MultiDict): + + """A special :class:`MultiDict` that has convenience methods to add + files to it. This is used for :class:`EnvironBuilder` and generally + useful for unittesting. + + .. versionadded:: 0.5 + """ + + def add_file(self, name, file, filename=None, content_type=None): + """Adds a new file to the dict. `file` can be a file name or + a :class:`file`-like or a :class:`FileStorage` object. + + :param name: the name of the field. + :param file: a filename or :class:`file`-like object + :param filename: an optional filename + :param content_type: an optional content type + """ + if isinstance(file, FileStorage): + value = file + else: + if isinstance(file, string_types): + if filename is None: + filename = file + file = open(file, 'rb') + if filename and content_type is None: + content_type = mimetypes.guess_type(filename)[0] or \ + 'application/octet-stream' + value = FileStorage(file, filename, name, content_type) + + self.add(name, value) + + +class ImmutableDict(ImmutableDictMixin, dict): + + """An immutable :class:`dict`. + + .. versionadded:: 0.5 + """ + + def __repr__(self): + return '%s(%s)' % ( + self.__class__.__name__, + dict.__repr__(self), + ) + + def copy(self): + """Return a shallow mutable copy of this object. Keep in mind that + the standard library's :func:`copy` function is a no-op for this class + like for any other python immutable type (eg: :class:`tuple`). + """ + return dict(self) + + def __copy__(self): + return self + + +class ImmutableMultiDict(ImmutableMultiDictMixin, MultiDict): + + """An immutable :class:`MultiDict`. + + .. versionadded:: 0.5 + """ + + def copy(self): + """Return a shallow mutable copy of this object. Keep in mind that + the standard library's :func:`copy` function is a no-op for this class + like for any other python immutable type (eg: :class:`tuple`). 
+ """ + return MultiDict(self) + + def __copy__(self): + return self + + +class ImmutableOrderedMultiDict(ImmutableMultiDictMixin, OrderedMultiDict): + + """An immutable :class:`OrderedMultiDict`. + + .. versionadded:: 0.6 + """ + + def _iter_hashitems(self): + return enumerate(iteritems(self, multi=True)) + + def copy(self): + """Return a shallow mutable copy of this object. Keep in mind that + the standard library's :func:`copy` function is a no-op for this class + like for any other python immutable type (eg: :class:`tuple`). + """ + return OrderedMultiDict(self) + + def __copy__(self): + return self + + +@native_itermethods(['values']) +class Accept(ImmutableList): + + """An :class:`Accept` object is just a list subclass for lists of + ``(value, quality)`` tuples. It is automatically sorted by specificity + and quality. + + All :class:`Accept` objects work similar to a list but provide extra + functionality for working with the data. Containment checks are + normalized to the rules of that header: + + >>> a = CharsetAccept([('ISO-8859-1', 1), ('utf-8', 0.7)]) + >>> a.best + 'ISO-8859-1' + >>> 'iso-8859-1' in a + True + >>> 'UTF8' in a + True + >>> 'utf7' in a + False + + To get the quality for an item you can use normal item lookup: + + >>> print a['utf-8'] + 0.7 + >>> a['utf7'] + 0 + + .. versionchanged:: 0.5 + :class:`Accept` objects are forced immutable now. + """ + + def __init__(self, values=()): + if values is None: + list.__init__(self) + self.provided = False + elif isinstance(values, Accept): + self.provided = values.provided + list.__init__(self, values) + else: + self.provided = True + values = sorted(values, key=lambda x: (self._specificity(x[0]), x[1], x[0]), + reverse=True) + list.__init__(self, values) + + def _specificity(self, value): + """Returns a tuple describing the value's specificity.""" + return value != '*', + + def _value_matches(self, value, item): + """Check if a value matches a given accept item.""" + return item == '*' or item.lower() == value.lower() + + def __getitem__(self, key): + """Besides index lookup (getting item n) you can also pass it a string + to get the quality for the item. If the item is not in the list, the + returned quality is ``0``. + """ + if isinstance(key, string_types): + return self.quality(key) + return list.__getitem__(self, key) + + def quality(self, key): + """Returns the quality of the key. + + .. versionadded:: 0.6 + In previous versions you had to use the item-lookup syntax + (eg: ``obj[key]`` instead of ``obj.quality(key)``) + """ + for item, quality in self: + if self._value_matches(key, item): + return quality + return 0 + + def __contains__(self, value): + for item, quality in self: + if self._value_matches(value, item): + return True + return False + + def __repr__(self): + return '%s([%s])' % ( + self.__class__.__name__, + ', '.join('(%r, %s)' % (x, y) for x, y in self) + ) + + def index(self, key): + """Get the position of an entry or raise :exc:`ValueError`. + + :param key: The key to be looked up. + + .. versionchanged:: 0.5 + This used to raise :exc:`IndexError`, which was inconsistent + with the list API. + """ + if isinstance(key, string_types): + for idx, (item, quality) in enumerate(self): + if self._value_matches(key, item): + return idx + raise ValueError(key) + return list.index(self, key) + + def find(self, key): + """Get the position of an entry or return -1. + + :param key: The key to be looked up. 
+ """ + try: + return self.index(key) + except ValueError: + return -1 + + def values(self): + """Iterate over all values.""" + for item in self: + yield item[0] + + def to_header(self): + """Convert the header set into an HTTP header string.""" + result = [] + for value, quality in self: + if quality != 1: + value = '%s;q=%s' % (value, quality) + result.append(value) + return ','.join(result) + + def __str__(self): + return self.to_header() + + def _best_single_match(self, match): + for client_item, quality in self: + if self._value_matches(match, client_item): + # self is sorted by specificity descending, we can exit + return client_item, quality + + def best_match(self, matches, default=None): + """Returns the best match from a list of possible matches based + on the specificity and quality of the client. If two items have the + same quality and specificity, the one is returned that comes first. + + :param matches: a list of matches to check for + :param default: the value that is returned if none match + """ + result = default + best_quality = -1 + best_specificity = (-1,) + for server_item in matches: + match = self._best_single_match(server_item) + if not match: + continue + client_item, quality = match + specificity = self._specificity(client_item) + if quality <= 0 or quality < best_quality: + continue + # better quality or same quality but more specific => better match + if quality > best_quality or specificity > best_specificity: + result = server_item + best_quality = quality + best_specificity = specificity + return result + + @property + def best(self): + """The best match as value.""" + if self: + return self[0][0] + + +class MIMEAccept(Accept): + + """Like :class:`Accept` but with special methods and behavior for + mimetypes. + """ + + def _specificity(self, value): + return tuple(x != '*' for x in value.split('/', 1)) + + def _value_matches(self, value, item): + def _normalize(x): + x = x.lower() + return x == '*' and ('*', '*') or x.split('/', 1) + + # this is from the application which is trusted. 
to avoid developer + # frustration we actually check these for valid values + if '/' not in value: + raise ValueError('invalid mimetype %r' % value) + value_type, value_subtype = _normalize(value) + if value_type == '*' and value_subtype != '*': + raise ValueError('invalid mimetype %r' % value) + + if '/' not in item: + return False + item_type, item_subtype = _normalize(item) + if item_type == '*' and item_subtype != '*': + return False + return ( + (item_type == item_subtype == '*' or + value_type == value_subtype == '*') or + (item_type == value_type and (item_subtype == '*' or + value_subtype == '*' or + item_subtype == value_subtype)) + ) + + @property + def accept_html(self): + """True if this object accepts HTML.""" + return ( + 'text/html' in self or + 'application/xhtml+xml' in self or + self.accept_xhtml + ) + + @property + def accept_xhtml(self): + """True if this object accepts XHTML.""" + return ( + 'application/xhtml+xml' in self or + 'application/xml' in self + ) + + @property + def accept_json(self): + """True if this object accepts JSON.""" + return 'application/json' in self + + +class LanguageAccept(Accept): + + """Like :class:`Accept` but with normalization for languages.""" + + def _value_matches(self, value, item): + def _normalize(language): + return _locale_delim_re.split(language.lower()) + return item == '*' or _normalize(value) == _normalize(item) + + +class CharsetAccept(Accept): + + """Like :class:`Accept` but with normalization for charsets.""" + + def _value_matches(self, value, item): + def _normalize(name): + try: + return codecs.lookup(name).name + except LookupError: + return name.lower() + return item == '*' or _normalize(value) == _normalize(item) + + +def cache_property(key, empty, type): + """Return a new property object for a cache header. Useful if you + want to add support for a cache extension in a subclass.""" + return property(lambda x: x._get_cache_value(key, empty, type), + lambda x, v: x._set_cache_value(key, v, type), + lambda x: x._del_cache_value(key), + 'accessor for %r' % key) + + +class _CacheControl(UpdateDictMixin, dict): + + """Subclass of a dict that stores values for a Cache-Control header. It + has accessors for all the cache-control directives specified in RFC 2616. + The class does not differentiate between request and response directives. + + Because the cache-control directives in the HTTP header use dashes the + python descriptors use underscores for that. + + To get a header of the :class:`CacheControl` object again you can convert + the object into a string or call the :meth:`to_header` method. If you plan + to subclass it and add your own items have a look at the sourcecode for + that class. + + .. versionchanged:: 0.4 + + Setting `no_cache` or `private` to boolean `True` will set the implicit + none-value which is ``*``: + + >>> cc = ResponseCacheControl() + >>> cc.no_cache = True + >>> cc + + >>> cc.no_cache + '*' + >>> cc.no_cache = None + >>> cc + + + In versions before 0.5 the behavior documented here affected the now + no longer existing `CacheControl` class. 
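A short sketch of the accessor properties described above, using the :class:`ResponseCacheControl` subclass defined further down (the exact directive ordering in the output may differ):

    from werkzeug.datastructures import ResponseCacheControl

    cc = ResponseCacheControl()
    cc.public = True           # boolean directives render without a value
    cc.max_age = 300           # valued directives keep their value
    cc.must_revalidate = True
    print(cc.to_header())      # something like 'public, max-age=300, must-revalidate'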
+ """ + + no_cache = cache_property('no-cache', '*', None) + no_store = cache_property('no-store', None, bool) + max_age = cache_property('max-age', -1, int) + no_transform = cache_property('no-transform', None, None) + + def __init__(self, values=(), on_update=None): + dict.__init__(self, values or ()) + self.on_update = on_update + self.provided = values is not None + + def _get_cache_value(self, key, empty, type): + """Used internally by the accessor properties.""" + if type is bool: + return key in self + if key in self: + value = self[key] + if value is None: + return empty + elif type is not None: + try: + value = type(value) + except ValueError: + pass + return value + + def _set_cache_value(self, key, value, type): + """Used internally by the accessor properties.""" + if type is bool: + if value: + self[key] = None + else: + self.pop(key, None) + else: + if value is None: + self.pop(key) + elif value is True: + self[key] = None + else: + self[key] = value + + def _del_cache_value(self, key): + """Used internally by the accessor properties.""" + if key in self: + del self[key] + + def to_header(self): + """Convert the stored values into a cache control header.""" + return dump_header(self) + + def __str__(self): + return self.to_header() + + def __repr__(self): + return '<%s %s>' % ( + self.__class__.__name__, + " ".join( + "%s=%r" % (k, v) for k, v in sorted(self.items()) + ), + ) + + +class RequestCacheControl(ImmutableDictMixin, _CacheControl): + + """A cache control for requests. This is immutable and gives access + to all the request-relevant cache control headers. + + To get a header of the :class:`RequestCacheControl` object again you can + convert the object into a string or call the :meth:`to_header` method. If + you plan to subclass it and add your own items have a look at the sourcecode + for that class. + + .. versionadded:: 0.5 + In previous versions a `CacheControl` class existed that was used + both for request and response. + """ + + max_stale = cache_property('max-stale', '*', int) + min_fresh = cache_property('min-fresh', '*', int) + no_transform = cache_property('no-transform', None, None) + only_if_cached = cache_property('only-if-cached', None, bool) + + +class ResponseCacheControl(_CacheControl): + + """A cache control for responses. Unlike :class:`RequestCacheControl` + this is mutable and gives access to response-relevant cache control + headers. + + To get a header of the :class:`ResponseCacheControl` object again you can + convert the object into a string or call the :meth:`to_header` method. If + you plan to subclass it and add your own items have a look at the sourcecode + for that class. + + .. versionadded:: 0.5 + In previous versions a `CacheControl` class existed that was used + both for request and response. + """ + + public = cache_property('public', None, bool) + private = cache_property('private', '*', None) + must_revalidate = cache_property('must-revalidate', None, bool) + proxy_revalidate = cache_property('proxy-revalidate', None, bool) + s_maxage = cache_property('s-maxage', None, None) + + +# attach cache_property to the _CacheControl as staticmethod +# so that others can reuse it. +_CacheControl.cache_property = staticmethod(cache_property) + + +class CallbackDict(UpdateDictMixin, dict): + + """A dict that calls a function passed every time something is changed. + The function is passed the dict instance. 
+ """ + + def __init__(self, initial=None, on_update=None): + dict.__init__(self, initial or ()) + self.on_update = on_update + + def __repr__(self): + return '<%s %s>' % ( + self.__class__.__name__, + dict.__repr__(self) + ) + + +class HeaderSet(MutableSet): + + """Similar to the :class:`ETags` class this implements a set-like structure. + Unlike :class:`ETags` this is case insensitive and used for vary, allow, and + content-language headers. + + If not constructed using the :func:`parse_set_header` function the + instantiation works like this: + + >>> hs = HeaderSet(['foo', 'bar', 'baz']) + >>> hs + HeaderSet(['foo', 'bar', 'baz']) + """ + + def __init__(self, headers=None, on_update=None): + self._headers = list(headers or ()) + self._set = set([x.lower() for x in self._headers]) + self.on_update = on_update + + def add(self, header): + """Add a new header to the set.""" + self.update((header,)) + + def remove(self, header): + """Remove a header from the set. This raises an :exc:`KeyError` if the + header is not in the set. + + .. versionchanged:: 0.5 + In older versions a :exc:`IndexError` was raised instead of a + :exc:`KeyError` if the object was missing. + + :param header: the header to be removed. + """ + key = header.lower() + if key not in self._set: + raise KeyError(header) + self._set.remove(key) + for idx, key in enumerate(self._headers): + if key.lower() == header: + del self._headers[idx] + break + if self.on_update is not None: + self.on_update(self) + + def update(self, iterable): + """Add all the headers from the iterable to the set. + + :param iterable: updates the set with the items from the iterable. + """ + inserted_any = False + for header in iterable: + key = header.lower() + if key not in self._set: + self._headers.append(header) + self._set.add(key) + inserted_any = True + if inserted_any and self.on_update is not None: + self.on_update(self) + + def discard(self, header): + """Like :meth:`remove` but ignores errors. + + :param header: the header to be discarded. + """ + try: + return self.remove(header) + except KeyError: + pass + + def find(self, header): + """Return the index of the header in the set or return -1 if not found. + + :param header: the header to be looked up. + """ + header = header.lower() + for idx, item in enumerate(self._headers): + if item.lower() == header: + return idx + return -1 + + def index(self, header): + """Return the index of the header in the set or raise an + :exc:`IndexError`. + + :param header: the header to be looked up. + """ + rv = self.find(header) + if rv < 0: + raise IndexError(header) + return rv + + def clear(self): + """Clear the set.""" + self._set.clear() + del self._headers[:] + if self.on_update is not None: + self.on_update(self) + + def as_set(self, preserve_casing=False): + """Return the set as real python set type. When calling this, all + the items are converted to lowercase and the ordering is lost. + + :param preserve_casing: if set to `True` the items in the set returned + will have the original case like in the + :class:`HeaderSet`, otherwise they will + be lowercase. 
+ """ + if preserve_casing: + return set(self._headers) + return set(self._set) + + def to_header(self): + """Convert the header set into an HTTP header string.""" + return ', '.join(map(quote_header_value, self._headers)) + + def __getitem__(self, idx): + return self._headers[idx] + + def __delitem__(self, idx): + rv = self._headers.pop(idx) + self._set.remove(rv.lower()) + if self.on_update is not None: + self.on_update(self) + + def __setitem__(self, idx, value): + old = self._headers[idx] + self._set.remove(old.lower()) + self._headers[idx] = value + self._set.add(value.lower()) + if self.on_update is not None: + self.on_update(self) + + def __contains__(self, header): + return header.lower() in self._set + + def __len__(self): + return len(self._set) + + def __iter__(self): + return iter(self._headers) + + def __nonzero__(self): + return bool(self._set) + + def __str__(self): + return self.to_header() + + def __repr__(self): + return '%s(%r)' % ( + self.__class__.__name__, + self._headers + ) + + +class ETags(Container, Iterable): + + """A set that can be used to check if one etag is present in a collection + of etags. + """ + + def __init__(self, strong_etags=None, weak_etags=None, star_tag=False): + self._strong = frozenset(not star_tag and strong_etags or ()) + self._weak = frozenset(weak_etags or ()) + self.star_tag = star_tag + + def as_set(self, include_weak=False): + """Convert the `ETags` object into a python set. Per default all the + weak etags are not part of this set.""" + rv = set(self._strong) + if include_weak: + rv.update(self._weak) + return rv + + def is_weak(self, etag): + """Check if an etag is weak.""" + return etag in self._weak + + def is_strong(self, etag): + """Check if an etag is strong.""" + return etag in self._strong + + def contains_weak(self, etag): + """Check if an etag is part of the set including weak and strong tags.""" + return self.is_weak(etag) or self.contains(etag) + + def contains(self, etag): + """Check if an etag is part of the set ignoring weak tags. + It is also possible to use the ``in`` operator. + """ + if self.star_tag: + return True + return self.is_strong(etag) + + def contains_raw(self, etag): + """When passed a quoted tag it will check if this tag is part of the + set. If the tag is weak it is checked against weak and strong tags, + otherwise strong only.""" + etag, weak = unquote_etag(etag) + if weak: + return self.contains_weak(etag) + return self.contains(etag) + + def to_header(self): + """Convert the etags set into a HTTP header string.""" + if self.star_tag: + return '*' + return ', '.join( + ['"%s"' % x for x in self._strong] + + ['W/"%s"' % x for x in self._weak] + ) + + def __call__(self, etag=None, data=None, include_weak=False): + if [etag, data].count(None) != 1: + raise TypeError('either tag or data required, but at least one') + if etag is None: + etag = generate_etag(data) + if include_weak: + if etag in self._weak: + return True + return etag in self._strong + + def __bool__(self): + return bool(self.star_tag or self._strong or self._weak) + + __nonzero__ = __bool__ + + def __str__(self): + return self.to_header() + + def __iter__(self): + return iter(self._strong) + + def __contains__(self, etag): + return self.contains(etag) + + def __repr__(self): + return '<%s %r>' % (self.__class__.__name__, str(self)) + + +class IfRange(object): + + """Very simple object that represents the `If-Range` header in parsed + form. It will either have neither a etag or date or one of either but + never both. + + .. 
versionadded:: 0.7 + """ + + def __init__(self, etag=None, date=None): + #: The etag parsed and unquoted. Ranges always operate on strong + #: etags so the weakness information is not necessary. + self.etag = etag + #: The date in parsed format or `None`. + self.date = date + + def to_header(self): + """Converts the object back into an HTTP header.""" + if self.date is not None: + return http_date(self.date) + if self.etag is not None: + return quote_etag(self.etag) + return '' + + def __str__(self): + return self.to_header() + + def __repr__(self): + return '<%s %r>' % (self.__class__.__name__, str(self)) + + +class Range(object): + + """Represents a range header. All the methods are only supporting bytes + as unit. It does store multiple ranges but :meth:`range_for_length` will + only work if only one range is provided. + + .. versionadded:: 0.7 + """ + + def __init__(self, units, ranges): + #: The units of this range. Usually "bytes". + self.units = units + #: A list of ``(begin, end)`` tuples for the range header provided. + #: The ranges are non-inclusive. + self.ranges = ranges + + def range_for_length(self, length): + """If the range is for bytes, the length is not None and there is + exactly one range and it is satisfiable it returns a ``(start, stop)`` + tuple, otherwise `None`. + """ + if self.units != 'bytes' or length is None or len(self.ranges) != 1: + return None + start, end = self.ranges[0] + if end is None: + end = length + if start < 0: + start += length + if is_byte_range_valid(start, end, length): + return start, min(end, length) + + def make_content_range(self, length): + """Creates a :class:`~werkzeug.datastructures.ContentRange` object + from the current range and given content length. + """ + rng = self.range_for_length(length) + if rng is not None: + return ContentRange(self.units, rng[0], rng[1], length) + + def to_header(self): + """Converts the object back into an HTTP header.""" + ranges = [] + for begin, end in self.ranges: + if end is None: + ranges.append(begin >= 0 and '%s-' % begin or str(begin)) + else: + ranges.append('%s-%s' % (begin, end - 1)) + return '%s=%s' % (self.units, ','.join(ranges)) + + def to_content_range_header(self, length): + """Converts the object into `Content-Range` HTTP header, + based on given length + """ + range_for_length = self.range_for_length(length) + if range_for_length is not None: + return '%s %d-%d/%d' % (self.units, + range_for_length[0], + range_for_length[1] - 1, length) + return None + + def __str__(self): + return self.to_header() + + def __repr__(self): + return '<%s %r>' % (self.__class__.__name__, str(self)) + + +class ContentRange(object): + + """Represents the content range header. + + .. versionadded:: 0.7 + """ + + def __init__(self, units, start, stop, length=None, on_update=None): + assert is_byte_range_valid(start, stop, length), \ + 'Bad range provided' + self.on_update = on_update + self.set(start, stop, length, units) + + def _callback_property(name): + def fget(self): + return getattr(self, name) + + def fset(self, value): + setattr(self, name, value) + if self.on_update is not None: + self.on_update(self) + return property(fget, fset) + + #: The units to use, usually "bytes" + units = _callback_property('_units') + #: The start point of the range or `None`. + start = _callback_property('_start') + #: The stop point of the range (non-inclusive) or `None`. Can only be + #: `None` if also start is `None`. + stop = _callback_property('_stop') + #: The length of the range or `None`. 
+ length = _callback_property('_length') + + def set(self, start, stop, length=None, units='bytes'): + """Simple method to update the ranges.""" + assert is_byte_range_valid(start, stop, length), \ + 'Bad range provided' + self._units = units + self._start = start + self._stop = stop + self._length = length + if self.on_update is not None: + self.on_update(self) + + def unset(self): + """Sets the units to `None` which indicates that the header should + no longer be used. + """ + self.set(None, None, units=None) + + def to_header(self): + if self.units is None: + return '' + if self.length is None: + length = '*' + else: + length = self.length + if self.start is None: + return '%s */%s' % (self.units, length) + return '%s %s-%s/%s' % ( + self.units, + self.start, + self.stop - 1, + length + ) + + def __nonzero__(self): + return self.units is not None + + __bool__ = __nonzero__ + + def __str__(self): + return self.to_header() + + def __repr__(self): + return '<%s %r>' % (self.__class__.__name__, str(self)) + + +class Authorization(ImmutableDictMixin, dict): + + """Represents an `Authorization` header sent by the client. You should + not create this kind of object yourself but use it when it's returned by + the `parse_authorization_header` function. + + This object is a dict subclass and can be altered by setting dict items + but it should be considered immutable as it's returned by the client and + not meant for modifications. + + .. versionchanged:: 0.5 + This object became immutable. + """ + + def __init__(self, auth_type, data=None): + dict.__init__(self, data or {}) + self.type = auth_type + + username = property(lambda x: x.get('username'), doc=''' + The username transmitted. This is set for both basic and digest + auth all the time.''') + password = property(lambda x: x.get('password'), doc=''' + When the authentication type is basic this is the password + transmitted by the client, else `None`.''') + realm = property(lambda x: x.get('realm'), doc=''' + This is the server realm sent back for HTTP digest auth.''') + nonce = property(lambda x: x.get('nonce'), doc=''' + The nonce the server sent for digest auth, sent back by the client. + A nonce should be unique for every 401 response for HTTP digest + auth.''') + uri = property(lambda x: x.get('uri'), doc=''' + The URI from Request-URI of the Request-Line; duplicated because + proxies are allowed to change the Request-Line in transit. HTTP + digest auth only.''') + nc = property(lambda x: x.get('nc'), doc=''' + The nonce count value transmitted by clients if a qop-header is + also transmitted. HTTP digest auth only.''') + cnonce = property(lambda x: x.get('cnonce'), doc=''' + If the server sent a qop-header in the ``WWW-Authenticate`` + header, the client has to provide this value for HTTP digest auth. + See the RFC for more details.''') + response = property(lambda x: x.get('response'), doc=''' + A string of 32 hex digits computed as defined in RFC 2617, which + proves that the user knows a password. Digest auth only.''') + opaque = property(lambda x: x.get('opaque'), doc=''' + The opaque header from the server returned unchanged by the client. + It is recommended that this string be base64 or hexadecimal data. + Digest auth only.''') + qop = property(lambda x: x.get('qop'), doc=''' + Indicates what "quality of protection" the client has applied to + the message for HTTP digest auth. 
Note that this is a single token, + not a quoted list of alternatives as in WWW-Authenticate.''') + + +class WWWAuthenticate(UpdateDictMixin, dict): + + """Provides simple access to `WWW-Authenticate` headers.""" + + #: list of keys that require quoting in the generated header + _require_quoting = frozenset(['domain', 'nonce', 'opaque', 'realm', 'qop']) + + def __init__(self, auth_type=None, values=None, on_update=None): + dict.__init__(self, values or ()) + if auth_type: + self['__auth_type__'] = auth_type + self.on_update = on_update + + def set_basic(self, realm='authentication required'): + """Clear the auth info and enable basic auth.""" + dict.clear(self) + dict.update(self, {'__auth_type__': 'basic', 'realm': realm}) + if self.on_update: + self.on_update(self) + + def set_digest(self, realm, nonce, qop=('auth',), opaque=None, + algorithm=None, stale=False): + """Clear the auth info and enable digest auth.""" + d = { + '__auth_type__': 'digest', + 'realm': realm, + 'nonce': nonce, + 'qop': dump_header(qop) + } + if stale: + d['stale'] = 'TRUE' + if opaque is not None: + d['opaque'] = opaque + if algorithm is not None: + d['algorithm'] = algorithm + dict.clear(self) + dict.update(self, d) + if self.on_update: + self.on_update(self) + + def to_header(self): + """Convert the stored values into a WWW-Authenticate header.""" + d = dict(self) + auth_type = d.pop('__auth_type__', None) or 'basic' + return '%s %s' % (auth_type.title(), ', '.join([ + '%s=%s' % (key, quote_header_value(value, + allow_token=key not in self._require_quoting)) + for key, value in iteritems(d) + ])) + + def __str__(self): + return self.to_header() + + def __repr__(self): + return '<%s %r>' % ( + self.__class__.__name__, + self.to_header() + ) + + def auth_property(name, doc=None): + """A static helper function for subclasses to add extra authentication + system properties onto a class:: + + class FooAuthenticate(WWWAuthenticate): + special_realm = auth_property('special_realm') + + For more information have a look at the sourcecode to see how the + regular properties (:attr:`realm` etc.) are implemented. + """ + + def _set_value(self, value): + if value is None: + self.pop(name, None) + else: + self[name] = str(value) + return property(lambda x: x.get(name), _set_value, doc=doc) + + def _set_property(name, doc=None): + def fget(self): + def on_update(header_set): + if not header_set and name in self: + del self[name] + elif header_set: + self[name] = header_set.to_header() + return parse_set_header(self.get(name), on_update) + return property(fget, doc=doc) + + type = auth_property('__auth_type__', doc=''' + The type of the auth mechanism. HTTP currently specifies + `Basic` and `Digest`.''') + realm = auth_property('realm', doc=''' + A string to be displayed to users so they know which username and + password to use. This string should contain at least the name of + the host performing the authentication and might additionally + indicate the collection of users who might have access.''') + domain = _set_property('domain', doc=''' + A list of URIs that define the protection space. If a URI is an + absolute path, it is relative to the canonical root URL of the + server being accessed.''') + nonce = auth_property('nonce', doc=''' + A server-specified data string which should be uniquely generated + each time a 401 response is made. 
It is recommended that this + string be base64 or hexadecimal data.''') + opaque = auth_property('opaque', doc=''' + A string of data, specified by the server, which should be returned + by the client unchanged in the Authorization header of subsequent + requests with URIs in the same protection space. It is recommended + that this string be base64 or hexadecimal data.''') + algorithm = auth_property('algorithm', doc=''' + A string indicating a pair of algorithms used to produce the digest + and a checksum. If this is not present it is assumed to be "MD5". + If the algorithm is not understood, the challenge should be ignored + (and a different one used, if there is more than one).''') + qop = _set_property('qop', doc=''' + A set of quality-of-privacy directives such as auth and auth-int.''') + + def _get_stale(self): + val = self.get('stale') + if val is not None: + return val.lower() == 'true' + + def _set_stale(self, value): + if value is None: + self.pop('stale', None) + else: + self['stale'] = value and 'TRUE' or 'FALSE' + stale = property(_get_stale, _set_stale, doc=''' + A flag, indicating that the previous request from the client was + rejected because the nonce value was stale.''') + del _get_stale, _set_stale + + # make auth_property a staticmethod so that subclasses of + # `WWWAuthenticate` can use it for new properties. + auth_property = staticmethod(auth_property) + del _set_property + + +class FileStorage(object): + + """The :class:`FileStorage` class is a thin wrapper over incoming files. + It is used by the request object to represent uploaded files. All the + attributes of the wrapper stream are proxied by the file storage so + it's possible to do ``storage.read()`` instead of the long form + ``storage.stream.read()``. + """ + + def __init__(self, stream=None, filename=None, name=None, + content_type=None, content_length=None, + headers=None): + self.name = name + self.stream = stream or _empty_stream + + # if no filename is provided we can attempt to get the filename + # from the stream object passed. There we have to be careful to + # skip things like , etc. Python marks these + # special filenames with angular brackets. + if filename is None: + filename = getattr(stream, 'name', None) + s = make_literal_wrapper(filename) + if filename and filename[0] == s('<') and filename[-1] == s('>'): + filename = None + + # On Python 3 we want to make sure the filename is always unicode. + # This might not be if the name attribute is bytes due to the + # file being opened from the bytes API. + if not PY2 and isinstance(filename, bytes): + filename = filename.decode(get_filesystem_encoding(), + 'replace') + + self.filename = filename + if headers is None: + headers = Headers() + self.headers = headers + if content_type is not None: + headers['Content-Type'] = content_type + if content_length is not None: + headers['Content-Length'] = str(content_length) + + def _parse_content_type(self): + if not hasattr(self, '_parsed_content_type'): + self._parsed_content_type = \ + parse_options_header(self.content_type) + + @property + def content_type(self): + """The content-type sent in the header. Usually not available""" + return self.headers.get('content-type') + + @property + def content_length(self): + """The content-length sent in the header. Usually not available""" + return int(self.headers.get('content-length') or 0) + + @property + def mimetype(self): + """Like :attr:`content_type`, but without parameters (eg, without + charset, type etc.) and always lowercase. 
For example if the content + type is ``text/HTML; charset=utf-8`` the mimetype would be + ``'text/html'``. + + .. versionadded:: 0.7 + """ + self._parse_content_type() + return self._parsed_content_type[0].lower() + + @property + def mimetype_params(self): + """The mimetype parameters as dict. For example if the content + type is ``text/html; charset=utf-8`` the params would be + ``{'charset': 'utf-8'}``. + + .. versionadded:: 0.7 + """ + self._parse_content_type() + return self._parsed_content_type[1] + + def save(self, dst, buffer_size=16384): + """Save the file to a destination path or file object. If the + destination is a file object you have to close it yourself after the + call. The buffer size is the number of bytes held in memory during + the copy process. It defaults to 16KB. + + For secure file saving also have a look at :func:`secure_filename`. + + :param dst: a filename or open file object the uploaded file + is saved to. + :param buffer_size: the size of the buffer. This works the same as + the `length` parameter of + :func:`shutil.copyfileobj`. + """ + from shutil import copyfileobj + close_dst = False + if isinstance(dst, string_types): + dst = open(dst, 'wb') + close_dst = True + try: + copyfileobj(self.stream, dst, buffer_size) + finally: + if close_dst: + dst.close() + + def close(self): + """Close the underlying file if possible.""" + try: + self.stream.close() + except Exception: + pass + + def __nonzero__(self): + return bool(self.filename) + __bool__ = __nonzero__ + + def __getattr__(self, name): + return getattr(self.stream, name) + + def __iter__(self): + return iter(self.stream) + + def __repr__(self): + return '<%s: %r (%r)>' % ( + self.__class__.__name__, + self.filename, + self.content_type + ) + + +# circular dependencies +from werkzeug.http import dump_options_header, dump_header, generate_etag, \ + quote_header_value, parse_set_header, unquote_etag, quote_etag, \ + parse_options_header, http_date, is_byte_range_valid +from werkzeug import exceptions diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/debug/__init__.py b/flask/venv/lib/python3.6/site-packages/werkzeug/debug/__init__.py new file mode 100644 index 0000000..14fe193 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/werkzeug/debug/__init__.py @@ -0,0 +1,470 @@ +# -*- coding: utf-8 -*- +""" + werkzeug.debug + ~~~~~~~~~~~~~~ + + WSGI application traceback debugger. + + :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. + :license: BSD, see LICENSE for more details. +""" +import os +import re +import sys +import uuid +import json +import time +import getpass +import hashlib +import mimetypes +from itertools import chain +from os.path import join, dirname, basename, isfile +from werkzeug.wrappers import BaseRequest as Request, BaseResponse as Response +from werkzeug.http import parse_cookie +from werkzeug.debug.tbtools import get_current_traceback, render_console_html +from werkzeug.debug.console import Console +from werkzeug.security import gen_salt +from werkzeug._internal import _log +from werkzeug._compat import text_type + + +# DEPRECATED +#: import this here because it once was documented as being available +#: from this module. In case there are users left ... 
+from werkzeug.debug.repr import debug_repr # noqa + + +# A week +PIN_TIME = 60 * 60 * 24 * 7 + + +def hash_pin(pin): + if isinstance(pin, text_type): + pin = pin.encode('utf-8', 'replace') + return hashlib.md5(pin + b'shittysalt').hexdigest()[:12] + + +_machine_id = None + + +def get_machine_id(): + global _machine_id + rv = _machine_id + if rv is not None: + return rv + + def _generate(): + # Potential sources of secret information on linux. The machine-id + # is stable across boots, the boot id is not + for filename in '/etc/machine-id', '/proc/sys/kernel/random/boot_id': + try: + with open(filename, 'rb') as f: + return f.readline().strip() + except IOError: + continue + + # On OS X we can use the computer's serial number assuming that + # ioreg exists and can spit out that information. + try: + # Also catch import errors: subprocess may not be available, e.g. + # Google App Engine + # See https://github.com/pallets/werkzeug/issues/925 + from subprocess import Popen, PIPE + dump = Popen(['ioreg', '-c', 'IOPlatformExpertDevice', '-d', '2'], + stdout=PIPE).communicate()[0] + match = re.search(b'"serial-number" = <([^>]+)', dump) + if match is not None: + return match.group(1) + except (OSError, ImportError): + pass + + # On Windows we can use winreg to get the machine guid + wr = None + try: + import winreg as wr + except ImportError: + try: + import _winreg as wr + except ImportError: + pass + if wr is not None: + try: + with wr.OpenKey(wr.HKEY_LOCAL_MACHINE, + 'SOFTWARE\\Microsoft\\Cryptography', 0, + wr.KEY_READ | wr.KEY_WOW64_64KEY) as rk: + machineGuid, wrType = wr.QueryValueEx(rk, 'MachineGuid') + if (wrType == wr.REG_SZ): + return machineGuid.encode('utf-8') + else: + return machineGuid + except WindowsError: + pass + + _machine_id = rv = _generate() + return rv + + +class _ConsoleFrame(object): + + """Helper class so that we can reuse the frame console code for the + standalone console. + """ + + def __init__(self, namespace): + self.console = Console(namespace) + self.id = 0 + + +def get_pin_and_cookie_name(app): + """Given an application object this returns a semi-stable 9 digit pin + code and a random key. The hope is that this is stable between + restarts to not make debugging particularly frustrating. If the pin + was forcefully disabled this returns `None`. + + Second item in the resulting tuple is the cookie name for remembering. + """ + pin = os.environ.get('WERKZEUG_DEBUG_PIN') + rv = None + num = None + + # Pin was explicitly disabled + if pin == 'off': + return None, None + + # Pin was provided explicitly + if pin is not None and pin.replace('-', '').isdigit(): + # If there are separators in the pin, return it directly + if '-' in pin: + rv = pin + else: + num = pin + + modname = getattr(app, '__module__', + getattr(app.__class__, '__module__')) + + try: + # `getpass.getuser()` imports the `pwd` module, + # which does not exist in the Google App Engine sandbox. + username = getpass.getuser() + except ImportError: + username = None + + mod = sys.modules.get(modname) + + # This information only exists to make the cookie unique on the + # computer, not as a security feature. + probably_public_bits = [ + username, + modname, + getattr(app, '__name__', getattr(app.__class__, '__name__')), + getattr(mod, '__file__', None), + ] + + # This information is here to make it harder for an attacker to + # guess the cookie name. They are unlikely to be contained anywhere + # within the unauthenticated debug page. 
+    private_bits = [
+        str(uuid.getnode()),
+        get_machine_id(),
+    ]
+
+    h = hashlib.md5()
+    for bit in chain(probably_public_bits, private_bits):
+        if not bit:
+            continue
+        if isinstance(bit, text_type):
+            bit = bit.encode('utf-8')
+        h.update(bit)
+    h.update(b'cookiesalt')
+
+    cookie_name = '__wzd' + h.hexdigest()[:20]
+
+    # If we need to generate a pin we salt it a bit more so that we don't
+    # end up with the same value and generate out 9 digits
+    if num is None:
+        h.update(b'pinsalt')
+        num = ('%09d' % int(h.hexdigest(), 16))[:9]
+
+    # Format the pincode in groups of digits for easier remembering if
+    # we don't have a result yet.
+    if rv is None:
+        for group_size in 5, 4, 3:
+            if len(num) % group_size == 0:
+                rv = '-'.join(num[x:x + group_size].rjust(group_size, '0')
+                              for x in range(0, len(num), group_size))
+                break
+        else:
+            rv = num
+
+    return rv, cookie_name
+
+
+class DebuggedApplication(object):
+    """Enables debugging support for a given application::
+
+        from werkzeug.debug import DebuggedApplication
+        from myapp import app
+        app = DebuggedApplication(app, evalex=True)
+
+    The `evalex` keyword argument allows evaluating expressions in a
+    traceback's frame context.
+
+    .. versionadded:: 0.9
+       The `lodgeit_url` parameter was deprecated.
+
+    :param app: the WSGI application to run debugged.
+    :param evalex: enable exception evaluation feature (interactive
+                   debugging). This requires a non-forking server.
+    :param request_key: The key that points to the request object in the
+                        environment. This parameter is ignored in current
+                        versions.
+    :param console_path: the URL for a general purpose console.
+    :param console_init_func: the function that is executed before starting
+                              the general purpose console. The return value
+                              is used as initial namespace.
+    :param show_hidden_frames: by default hidden traceback frames are skipped.
+                               You can show them by setting this parameter
+                               to `True`.
+    :param pin_security: can be used to disable the pin based security system.
+    :param pin_logging: enables the logging of the pin system.
+    """
+
+    def __init__(self, app, evalex=False, request_key='werkzeug.request',
+                 console_path='/console', console_init_func=None,
+                 show_hidden_frames=False, lodgeit_url=None,
+                 pin_security=True, pin_logging=True):
+        if lodgeit_url is not None:
+            from warnings import warn
+            warn(DeprecationWarning('Werkzeug now pastes into gists.'))
+        if not console_init_func:
+            console_init_func = None
+        self.app = app
+        self.evalex = evalex
+        self.frames = {}
+        self.tracebacks = {}
+        self.request_key = request_key
+        self.console_path = console_path
+        self.console_init_func = console_init_func
+        self.show_hidden_frames = show_hidden_frames
+        self.secret = gen_salt(20)
+        self._failed_pin_auth = 0
+
+        self.pin_logging = pin_logging
+        if pin_security:
+            # Print out the pin for the debugger on standard out.
+            if os.environ.get('WERKZEUG_RUN_MAIN') == 'true' and \
+                    pin_logging:
+                _log('warning', ' * Debugger is active!')
+            if self.pin is None:
+                _log('warning', ' * Debugger PIN disabled.
' + 'DEBUGGER UNSECURED!') + else: + _log('info', ' * Debugger PIN: %s' % self.pin) + else: + self.pin = None + + def _get_pin(self): + if not hasattr(self, '_pin'): + self._pin, self._pin_cookie = get_pin_and_cookie_name(self.app) + return self._pin + + def _set_pin(self, value): + self._pin = value + + pin = property(_get_pin, _set_pin) + del _get_pin, _set_pin + + @property + def pin_cookie_name(self): + """The name of the pin cookie.""" + if not hasattr(self, '_pin_cookie'): + self._pin, self._pin_cookie = get_pin_and_cookie_name(self.app) + return self._pin_cookie + + def debug_application(self, environ, start_response): + """Run the application and conserve the traceback frames.""" + app_iter = None + try: + app_iter = self.app(environ, start_response) + for item in app_iter: + yield item + if hasattr(app_iter, 'close'): + app_iter.close() + except Exception: + if hasattr(app_iter, 'close'): + app_iter.close() + traceback = get_current_traceback( + skip=1, show_hidden_frames=self.show_hidden_frames, + ignore_system_exceptions=True) + for frame in traceback.frames: + self.frames[frame.id] = frame + self.tracebacks[traceback.id] = traceback + + try: + start_response('500 INTERNAL SERVER ERROR', [ + ('Content-Type', 'text/html; charset=utf-8'), + # Disable Chrome's XSS protection, the debug + # output can cause false-positives. + ('X-XSS-Protection', '0'), + ]) + except Exception: + # if we end up here there has been output but an error + # occurred. in that situation we can do nothing fancy any + # more, better log something into the error log and fall + # back gracefully. + environ['wsgi.errors'].write( + 'Debugging middleware caught exception in streamed ' + 'response at a point where response headers were already ' + 'sent.\n') + else: + is_trusted = bool(self.check_pin_trust(environ)) + yield traceback.render_full(evalex=self.evalex, + evalex_trusted=is_trusted, + secret=self.secret) \ + .encode('utf-8', 'replace') + + traceback.log(environ['wsgi.errors']) + + def execute_command(self, request, command, frame): + """Execute a command in a console.""" + return Response(frame.console.eval(command), mimetype='text/html') + + def display_console(self, request): + """Display a standalone shell.""" + if 0 not in self.frames: + if self.console_init_func is None: + ns = {} + else: + ns = dict(self.console_init_func()) + ns.setdefault('app', self.app) + self.frames[0] = _ConsoleFrame(ns) + is_trusted = bool(self.check_pin_trust(request.environ)) + return Response(render_console_html(secret=self.secret, + evalex_trusted=is_trusted), + mimetype='text/html') + + def paste_traceback(self, request, traceback): + """Paste the traceback and return a JSON response.""" + rv = traceback.paste() + return Response(json.dumps(rv), mimetype='application/json') + + def get_resource(self, request, filename): + """Return a static resource from the shared folder.""" + filename = join(dirname(__file__), 'shared', basename(filename)) + if isfile(filename): + mimetype = mimetypes.guess_type(filename)[0] \ + or 'application/octet-stream' + f = open(filename, 'rb') + try: + return Response(f.read(), mimetype=mimetype) + finally: + f.close() + return Response('Not Found', status=404) + + def check_pin_trust(self, environ): + """Checks if the request passed the pin test. This returns `True` if the + request is trusted on a pin/cookie basis and returns `False` if not. + Additionally if the cookie's stored pin hash is wrong it will return + `None` so that appropriate action can be taken. 
+ """ + if self.pin is None: + return True + val = parse_cookie(environ).get(self.pin_cookie_name) + if not val or '|' not in val: + return False + ts, pin_hash = val.split('|', 1) + if not ts.isdigit(): + return False + if pin_hash != hash_pin(self.pin): + return None + return (time.time() - PIN_TIME) < int(ts) + + def _fail_pin_auth(self): + time.sleep(self._failed_pin_auth > 5 and 5.0 or 0.5) + self._failed_pin_auth += 1 + + def pin_auth(self, request): + """Authenticates with the pin.""" + exhausted = False + auth = False + trust = self.check_pin_trust(request.environ) + + # If the trust return value is `None` it means that the cookie is + # set but the stored pin hash value is bad. This means that the + # pin was changed. In this case we count a bad auth and unset the + # cookie. This way it becomes harder to guess the cookie name + # instead of the pin as we still count up failures. + bad_cookie = False + if trust is None: + self._fail_pin_auth() + bad_cookie = True + + # If we're trusted, we're authenticated. + elif trust: + auth = True + + # If we failed too many times, then we're locked out. + elif self._failed_pin_auth > 10: + exhausted = True + + # Otherwise go through pin based authentication + else: + entered_pin = request.args.get('pin') + if entered_pin.strip().replace('-', '') == \ + self.pin.replace('-', ''): + self._failed_pin_auth = 0 + auth = True + else: + self._fail_pin_auth() + + rv = Response(json.dumps({ + 'auth': auth, + 'exhausted': exhausted, + }), mimetype='application/json') + if auth: + rv.set_cookie(self.pin_cookie_name, '%s|%s' % ( + int(time.time()), + hash_pin(self.pin) + ), httponly=True) + elif bad_cookie: + rv.delete_cookie(self.pin_cookie_name) + return rv + + def log_pin_request(self): + """Log the pin if needed.""" + if self.pin_logging and self.pin is not None: + _log('info', ' * To enable the debugger you need to ' + 'enter the security pin:') + _log('info', ' * Debugger pin code: %s' % self.pin) + return Response('') + + def __call__(self, environ, start_response): + """Dispatch the requests.""" + # important: don't ever access a function here that reads the incoming + # form data! Otherwise the application won't have access to that data + # any more! 
+ request = Request(environ) + response = self.debug_application + if request.args.get('__debugger__') == 'yes': + cmd = request.args.get('cmd') + arg = request.args.get('f') + secret = request.args.get('s') + traceback = self.tracebacks.get(request.args.get('tb', type=int)) + frame = self.frames.get(request.args.get('frm', type=int)) + if cmd == 'resource' and arg: + response = self.get_resource(request, arg) + elif cmd == 'paste' and traceback is not None and \ + secret == self.secret: + response = self.paste_traceback(request, traceback) + elif cmd == 'pinauth' and secret == self.secret: + response = self.pin_auth(request) + elif cmd == 'printpin' and secret == self.secret: + response = self.log_pin_request() + elif self.evalex and cmd is not None and frame is not None \ + and self.secret == secret and \ + self.check_pin_trust(environ): + response = self.execute_command(request, cmd, frame) + elif self.evalex and self.console_path is not None and \ + request.path == self.console_path: + response = self.display_console(request) + return response(environ, start_response) diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/debug/__pycache__/__init__.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/werkzeug/debug/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..872c27b Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/werkzeug/debug/__pycache__/__init__.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/debug/__pycache__/console.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/werkzeug/debug/__pycache__/console.cpython-36.pyc new file mode 100644 index 0000000..8a327c1 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/werkzeug/debug/__pycache__/console.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/debug/__pycache__/repr.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/werkzeug/debug/__pycache__/repr.cpython-36.pyc new file mode 100644 index 0000000..d41a4a6 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/werkzeug/debug/__pycache__/repr.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/debug/__pycache__/tbtools.cpython-36.pyc b/flask/venv/lib/python3.6/site-packages/werkzeug/debug/__pycache__/tbtools.cpython-36.pyc new file mode 100644 index 0000000..1728ac0 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/werkzeug/debug/__pycache__/tbtools.cpython-36.pyc differ diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/debug/console.py b/flask/venv/lib/python3.6/site-packages/werkzeug/debug/console.py new file mode 100644 index 0000000..30e8906 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/werkzeug/debug/console.py @@ -0,0 +1,215 @@ +# -*- coding: utf-8 -*- +""" + werkzeug.debug.console + ~~~~~~~~~~~~~~~~~~~~~~ + + Interactive console support. + + :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. + :license: BSD. 
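A sketch of what this module provides: evaluating a line of code in a sandboxed namespace and getting back an HTML transcript for the debugger page (the output shown in the comment is approximate):

    from werkzeug.debug.console import Console

    console = Console(globals={'answer': 42})
    html = console.eval('answer * 2')
    # 'html' is an HTML-escaped transcript along the lines of '>>> answer * 2'
    # followed by the highlighted repr of 84
    print(html)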
+""" +import sys +import code +from types import CodeType + +from werkzeug.utils import escape +from werkzeug.local import Local +from werkzeug.debug.repr import debug_repr, dump, helper + + +_local = Local() + + +class HTMLStringO(object): + + """A StringO version that HTML escapes on write.""" + + def __init__(self): + self._buffer = [] + + def isatty(self): + return False + + def close(self): + pass + + def flush(self): + pass + + def seek(self, n, mode=0): + pass + + def readline(self): + if len(self._buffer) == 0: + return '' + ret = self._buffer[0] + del self._buffer[0] + return ret + + def reset(self): + val = ''.join(self._buffer) + del self._buffer[:] + return val + + def _write(self, x): + if isinstance(x, bytes): + x = x.decode('utf-8', 'replace') + self._buffer.append(x) + + def write(self, x): + self._write(escape(x)) + + def writelines(self, x): + self._write(escape(''.join(x))) + + +class ThreadedStream(object): + + """Thread-local wrapper for sys.stdout for the interactive console.""" + + def push(): + if not isinstance(sys.stdout, ThreadedStream): + sys.stdout = ThreadedStream() + _local.stream = HTMLStringO() + push = staticmethod(push) + + def fetch(): + try: + stream = _local.stream + except AttributeError: + return '' + return stream.reset() + fetch = staticmethod(fetch) + + def displayhook(obj): + try: + stream = _local.stream + except AttributeError: + return _displayhook(obj) + # stream._write bypasses escaping as debug_repr is + # already generating HTML for us. + if obj is not None: + _local._current_ipy.locals['_'] = obj + stream._write(debug_repr(obj)) + displayhook = staticmethod(displayhook) + + def __setattr__(self, name, value): + raise AttributeError('read only attribute %s' % name) + + def __dir__(self): + return dir(sys.__stdout__) + + def __getattribute__(self, name): + if name == '__members__': + return dir(sys.__stdout__) + try: + stream = _local.stream + except AttributeError: + stream = sys.__stdout__ + return getattr(stream, name) + + def __repr__(self): + return repr(sys.__stdout__) + + +# add the threaded stream as display hook +_displayhook = sys.displayhook +sys.displayhook = ThreadedStream.displayhook + + +class _ConsoleLoader(object): + + def __init__(self): + self._storage = {} + + def register(self, code, source): + self._storage[id(code)] = source + # register code objects of wrapped functions too. + for var in code.co_consts: + if isinstance(var, CodeType): + self._storage[id(var)] = source + + def get_source_by_code(self, code): + try: + return self._storage[id(code)] + except KeyError: + pass + + +def _wrap_compiler(console): + compile = console.compile + + def func(source, filename, symbol): + code = compile(source, filename, symbol) + console.loader.register(code, source) + return code + console.compile = func + + +class _InteractiveConsole(code.InteractiveInterpreter): + + def __init__(self, globals, locals): + code.InteractiveInterpreter.__init__(self, locals) + self.globals = dict(globals) + self.globals['dump'] = dump + self.globals['help'] = helper + self.globals['__loader__'] = self.loader = _ConsoleLoader() + self.more = False + self.buffer = [] + _wrap_compiler(self) + + def runsource(self, source): + source = source.rstrip() + '\n' + ThreadedStream.push() + prompt = self.more and '... 
' or '>>> ' + try: + source_to_eval = ''.join(self.buffer + [source]) + if code.InteractiveInterpreter.runsource(self, + source_to_eval, '', 'single'): + self.more = True + self.buffer.append(source) + else: + self.more = False + del self.buffer[:] + finally: + output = ThreadedStream.fetch() + return prompt + escape(source) + output + + def runcode(self, code): + try: + eval(code, self.globals, self.locals) + except Exception: + self.showtraceback() + + def showtraceback(self): + from werkzeug.debug.tbtools import get_current_traceback + tb = get_current_traceback(skip=1) + sys.stdout._write(tb.render_summary()) + + def showsyntaxerror(self, filename=None): + from werkzeug.debug.tbtools import get_current_traceback + tb = get_current_traceback(skip=4) + sys.stdout._write(tb.render_summary()) + + def write(self, data): + sys.stdout.write(data) + + +class Console(object): + + """An interactive console.""" + + def __init__(self, globals=None, locals=None): + if locals is None: + locals = {} + if globals is None: + globals = {} + self._ipy = _InteractiveConsole(globals, locals) + + def eval(self, code): + _local._current_ipy = self._ipy + old_sys_stdout = sys.stdout + try: + return self._ipy.runsource(code) + finally: + sys.stdout = old_sys_stdout diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/debug/repr.py b/flask/venv/lib/python3.6/site-packages/werkzeug/debug/repr.py new file mode 100644 index 0000000..0a02457 --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/werkzeug/debug/repr.py @@ -0,0 +1,280 @@ +# -*- coding: utf-8 -*- +""" + werkzeug.debug.repr + ~~~~~~~~~~~~~~~~~~~ + + This module implements object representations for debugging purposes. + Unlike the default repr these reprs expose a lot more information and + produce HTML instead of ASCII. + + Together with the CSS and JavaScript files of the debugger this gives + a colorful and more compact output. + + :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. + :license: BSD. +""" +import sys +import re +import codecs +from traceback import format_exception_only +try: + from collections import deque +except ImportError: # pragma: no cover + deque = None +from werkzeug.utils import escape +from werkzeug._compat import iteritems, PY2, text_type, integer_types, \ + string_types + + +missing = object() +_paragraph_re = re.compile(r'(?:\r\n|\r|\n){2,}') +RegexType = type(_paragraph_re) + + +HELP_HTML = '''\ +
+<div class=box>
+  <h3>%(title)s</h3>
+  <pre class=help>%(text)s</pre>
+</div>\
+'''
+OBJECT_DUMP_HTML = '''\
+<div class=box>
+  <h3>%(title)s</h3>
+  %(repr)s
+  <table>%(items)s</table>
+</div>
\ +''' + + +def debug_repr(obj): + """Creates a debug repr of an object as HTML unicode string.""" + return DebugReprGenerator().repr(obj) + + +def dump(obj=missing): + """Print the object details to stdout._write (for the interactive + console of the web debugger. + """ + gen = DebugReprGenerator() + if obj is missing: + rv = gen.dump_locals(sys._getframe(1).f_locals) + else: + rv = gen.dump_object(obj) + sys.stdout._write(rv) + + +class _Helper(object): + + """Displays an HTML version of the normal help, for the interactive + debugger only because it requires a patched sys.stdout. + """ + + def __repr__(self): + return 'Type help(object) for help about object.' + + def __call__(self, topic=None): + if topic is None: + sys.stdout._write('%s' % repr(self)) + return + import pydoc + pydoc.help(topic) + rv = sys.stdout.reset() + if isinstance(rv, bytes): + rv = rv.decode('utf-8', 'ignore') + paragraphs = _paragraph_re.split(rv) + if len(paragraphs) > 1: + title = paragraphs[0] + text = '\n\n'.join(paragraphs[1:]) + else: # pragma: no cover + title = 'Help' + text = paragraphs[0] + sys.stdout._write(HELP_HTML % {'title': title, 'text': text}) + + +helper = _Helper() + + +def _add_subclass_info(inner, obj, base): + if isinstance(base, tuple): + for base in base: + if type(obj) is base: + return inner + elif type(obj) is base: + return inner + module = '' + if obj.__class__.__module__ not in ('__builtin__', 'exceptions'): + module = '%s.' % obj.__class__.__module__ + return '%s%s(%s)' % (module, obj.__class__.__name__, inner) + + +class DebugReprGenerator(object): + + def __init__(self): + self._stack = [] + + def _sequence_repr_maker(left, right, base=object(), limit=8): + def proxy(self, obj, recursive): + if recursive: + return _add_subclass_info(left + '...' + right, obj, base) + buf = [left] + have_extended_section = False + for idx, item in enumerate(obj): + if idx: + buf.append(', ') + if idx == limit: + buf.append('') + have_extended_section = True + buf.append(self.repr(item)) + if have_extended_section: + buf.append('') + buf.append(right) + return _add_subclass_info(u''.join(buf), obj, base) + return proxy + + list_repr = _sequence_repr_maker('[', ']', list) + tuple_repr = _sequence_repr_maker('(', ')', tuple) + set_repr = _sequence_repr_maker('set([', '])', set) + frozenset_repr = _sequence_repr_maker('frozenset([', '])', frozenset) + if deque is not None: + deque_repr = _sequence_repr_maker('collections.' 
+ 'deque([', '])', deque) + del _sequence_repr_maker + + def regex_repr(self, obj): + pattern = repr(obj.pattern) + if PY2: + pattern = pattern.decode('string-escape', 'ignore') + else: + pattern = codecs.decode(pattern, 'unicode-escape', 'ignore') + if pattern[:1] == 'u': + pattern = 'ur' + pattern[1:] + else: + pattern = 'r' + pattern + return u're.compile(%s)' % pattern + + def string_repr(self, obj, limit=70): + buf = [''] + a = repr(obj[:limit]) + b = repr(obj[limit:]) + if isinstance(obj, text_type) and PY2: + buf.append('u') + a = a[1:] + b = b[1:] + if b != "''": + buf.extend((escape(a[:-1]), '', escape(b[1:]), '')) + else: + buf.append(escape(a)) + buf.append('') + return _add_subclass_info(u''.join(buf), obj, (bytes, text_type)) + + def dict_repr(self, d, recursive, limit=5): + if recursive: + return _add_subclass_info(u'{...}', d, dict) + buf = ['{'] + have_extended_section = False + for idx, (key, value) in enumerate(iteritems(d)): + if idx: + buf.append(', ') + if idx == limit - 1: + buf.append('') + have_extended_section = True + buf.append('%s: ' + '%s' % + (self.repr(key), self.repr(value))) + if have_extended_section: + buf.append('') + buf.append('}') + return _add_subclass_info(u''.join(buf), d, dict) + + def object_repr(self, obj): + r = repr(obj) + if PY2: + r = r.decode('utf-8', 'replace') + return u'%s' % escape(r) + + def dispatch_repr(self, obj, recursive): + if obj is helper: + return u'%r' % helper + if isinstance(obj, (integer_types, float, complex)): + return u'%r' % obj + if isinstance(obj, string_types): + return self.string_repr(obj) + if isinstance(obj, RegexType): + return self.regex_repr(obj) + if isinstance(obj, list): + return self.list_repr(obj, recursive) + if isinstance(obj, tuple): + return self.tuple_repr(obj, recursive) + if isinstance(obj, set): + return self.set_repr(obj, recursive) + if isinstance(obj, frozenset): + return self.frozenset_repr(obj, recursive) + if isinstance(obj, dict): + return self.dict_repr(obj, recursive) + if deque is not None and isinstance(obj, deque): + return self.deque_repr(obj, recursive) + return self.object_repr(obj) + + def fallback_repr(self): + try: + info = ''.join(format_exception_only(*sys.exc_info()[:2])) + except Exception: # pragma: no cover + info = '?' + if PY2: + info = info.decode('utf-8', 'ignore') + return u'<broken repr (%s)>' \ + u'' % escape(info.strip()) + + def repr(self, obj): + recursive = False + for item in self._stack: + if item is obj: + recursive = True + break + self._stack.append(obj) + try: + try: + return self.dispatch_repr(obj, recursive) + except Exception: + return self.fallback_repr() + finally: + self._stack.pop() + + def dump_object(self, obj): + repr = items = None + if isinstance(obj, dict): + title = 'Contents of' + items = [] + for key, value in iteritems(obj): + if not isinstance(key, string_types): + items = None + break + items.append((key, self.repr(value))) + if items is None: + items = [] + repr = self.repr(obj) + for key in dir(obj): + try: + items.append((key, self.repr(getattr(obj, key)))) + except Exception: + pass + title = 'Details for' + title += ' ' + object.__repr__(obj)[1:-1] + return self.render_object_dump(items, title, repr) + + def dump_locals(self, d): + items = [(key, self.repr(value)) for key, value in d.items()] + return self.render_object_dump(items, 'Local variables in frame') + + def render_object_dump(self, items, title, repr=None): + html_items = [] + for key, value in items: + html_items.append('%s
<td><pre class=repr>%s</pre>' %
+                              (escape(key), value))
+        if not html_items:
+            html_items.append('<tr><td><em>Nothing</em>')
+        return OBJECT_DUMP_HTML % {
+            'title': escape(title),
+            'repr': repr and '<pre class=repr>%s</pre>
' % repr or '', + 'items': '\n'.join(html_items) + } diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/debug/shared/FONT_LICENSE b/flask/venv/lib/python3.6/site-packages/werkzeug/debug/shared/FONT_LICENSE new file mode 100644 index 0000000..ae78a8f --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/werkzeug/debug/shared/FONT_LICENSE @@ -0,0 +1,96 @@ +------------------------------- +UBUNTU FONT LICENCE Version 1.0 +------------------------------- + +PREAMBLE +This licence allows the licensed fonts to be used, studied, modified and +redistributed freely. The fonts, including any derivative works, can be +bundled, embedded, and redistributed provided the terms of this licence +are met. The fonts and derivatives, however, cannot be released under +any other licence. The requirement for fonts to remain under this +licence does not require any document created using the fonts or their +derivatives to be published under this licence, as long as the primary +purpose of the document is not to be a vehicle for the distribution of +the fonts. + +DEFINITIONS +"Font Software" refers to the set of files released by the Copyright +Holder(s) under this licence and clearly marked as such. This may +include source files, build scripts and documentation. + +"Original Version" refers to the collection of Font Software components +as received under this licence. + +"Modified Version" refers to any derivative made by adding to, deleting, +or substituting -- in part or in whole -- any of the components of the +Original Version, by changing formats or by porting the Font Software to +a new environment. + +"Copyright Holder(s)" refers to all individuals and companies who have a +copyright ownership of the Font Software. + +"Substantially Changed" refers to Modified Versions which can be easily +identified as dissimilar to the Font Software by users of the Font +Software comparing the Original Version with the Modified Version. + +To "Propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification and with or without charging +a redistribution fee), making available to the public, and in some +countries other activities as well. + +PERMISSION & CONDITIONS +This licence does not grant any rights under trademark law and all such +rights are reserved. + +Permission is hereby granted, free of charge, to any person obtaining a +copy of the Font Software, to propagate the Font Software, subject to +the below conditions: + +1) Each copy of the Font Software must contain the above copyright +notice and this licence. These can be included either as stand-alone +text files, human-readable headers or in the appropriate machine- +readable metadata fields within text or binary files as long as those +fields can be easily viewed by the user. + +2) The font name complies with the following: +(a) The Original Version must retain its name, unmodified. +(b) Modified Versions which are Substantially Changed must be renamed to +avoid use of the name of the Original Version or similar names entirely. +(c) Modified Versions which are not Substantially Changed must be +renamed to both (i) retain the name of the Original Version and (ii) add +additional naming elements to distinguish the Modified Version from the +Original Version. 
The name of such Modified Versions must be the name of +the Original Version, with "derivative X" where X represents the name of +the new work, appended to that name. + +3) The name(s) of the Copyright Holder(s) and any contributor to the +Font Software shall not be used to promote, endorse or advertise any +Modified Version, except (i) as required by this licence, (ii) to +acknowledge the contribution(s) of the Copyright Holder(s) or (iii) with +their explicit written permission. + +4) The Font Software, modified or unmodified, in part or in whole, must +be distributed entirely under this licence, and must not be distributed +under any other licence. The requirement for fonts to remain under this +licence does not affect any document created using the Font Software, +except any version of the Font Software extracted from a document +created using the Font Software may only be distributed under this +licence. + +TERMINATION +This licence becomes null and void if any of the above conditions are +not met. + +DISCLAIMER +THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF +COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL +DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM OTHER +DEALINGS IN THE FONT SOFTWARE. diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/debug/shared/console.png b/flask/venv/lib/python3.6/site-packages/werkzeug/debug/shared/console.png new file mode 100644 index 0000000..c28dd63 Binary files /dev/null and b/flask/venv/lib/python3.6/site-packages/werkzeug/debug/shared/console.png differ diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/debug/shared/debugger.js b/flask/venv/lib/python3.6/site-packages/werkzeug/debug/shared/debugger.js new file mode 100644 index 0000000..534019d --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/werkzeug/debug/shared/debugger.js @@ -0,0 +1,205 @@ +$(function() { + if (!EVALEX_TRUSTED) { + initPinBox(); + } + + /** + * if we are in console mode, show the console. + */ + if (CONSOLE_MODE && EVALEX) { + openShell(null, $('div.console div.inner').empty(), 0); + } + + $('div.traceback div.frame').each(function() { + var + target = $('pre', this), + consoleNode = null, + frameID = this.id.substring(6); + + target.click(function() { + $(this).parent().toggleClass('expanded'); + }); + + /** + * Add an interactive console to the frames + */ + if (EVALEX && target.is('.current')) { + $('') + .attr('title', 'Open an interactive python shell in this frame') + .click(function() { + consoleNode = openShell(consoleNode, target, frameID); + return false; + }) + .prependTo(target); + } + }); + + /** + * toggle traceback types on click. + */ + $('h2.traceback').click(function() { + $(this).next().slideToggle('fast'); + $('div.plain').slideToggle('fast'); + }).css('cursor', 'pointer'); + $('div.plain').hide(); + + /** + * Add extra info (this is here so that only users with JavaScript + * enabled see it.) + */ + $('span.nojavascript') + .removeClass('nojavascript') + .html('

<p>To switch between the interactive traceback and the plaintext ' +
+          'one, you can click on the "Traceback" headline. From the text ' +
+          'traceback you can also create a paste of it. ' + (!EVALEX ? '' :
+          'For code execution mouse-over the frame you want to debug and ' +
+          'click on the console icon on the right side.' +
+          '<p>You can execute arbitrary Python code in the stack frames and ' +
+          'there are some extra helpers available for introspection:' +
+          '<ul><li><code>dump()</code> shows all variables in the frame' +
+          '<li><code>dump(obj)</code> dumps all that\'s known about the object</ul>'));
+
+  /**
+   * Add the pastebin feature
+   */
+  $('div.plain form')
+    .submit(function() {
+      var label = $('input[type="submit"]', this);
+      var old_val = label.val();
+      label.val('submitting...');
+      $.ajax({
+        dataType: 'json',
+        url: document.location.pathname,
+        data: {__debugger__: 'yes', tb: TRACEBACK, cmd: 'paste',
+               s: SECRET},
+        success: function(data) {
+          $('div.plain span.pastemessage')
+            .removeClass('pastemessage')
+            .text('Paste created: ')
+            .append($('<a>#' + data.id + '</a>').attr('href', data.url));
+        },
+        error: function() {
+          alert('Error: Could not submit paste. No network connection?');
+          label.val(old_val);
+        }
+      });
+      return false;
+    });
+
+  // if we have javascript we submit by ajax anyways, so no need for the
+  // not scaling textarea.
+  var plainTraceback = $('div.plain textarea');
+  plainTraceback.replaceWith($('<pre>').text(plainTraceback.text()));
+});
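Editor's note: the handlers wired up above (and the pin helpers below) all talk to the same endpoint with a small set of cmd values; the server side of that protocol is the DebuggedApplication.__call__ dispatch earlier in this diff. A minimal orientation sketch of the mapping follows; the handler names are quoted as strings, not imported.

# Sketch only -- summarises the cmd dispatch in DebuggedApplication.__call__;
# every request also carries __debugger__=yes and the per-process secret `s`.
DISPATCH = {
    'resource': 'get_resource(request, f)',      # static debugger assets
    'paste':    'paste_traceback(request, tb)',  # the pastebin form above
    'pinauth':  'pin_auth(request)',             # initPinBox() below
    'printpin': 'log_pin_request()',             # promptForPin() below
}
# Any other cmd plus a frame id (frm) reaches execute_command(), but only when
# evalex is enabled and check_pin_trust() accepts the trust cookie.
for cmd, handler in DISPATCH.items():
    print('%-9s -> %s' % (cmd, handler))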
+
+function initPinBox() {
+  $('.pin-prompt form').submit(function(evt) {
+    evt.preventDefault();
+    var pin = this.pin.value;
+    var btn = this.btn;
+    btn.disabled = true;
+    $.ajax({
+      dataType: 'json',
+      url: document.location.pathname,
+      data: {__debugger__: 'yes', cmd: 'pinauth', pin: pin,
+             s: SECRET},
+      success: function(data) {
+        btn.disabled = false;
+        if (data.auth) {
+          EVALEX_TRUSTED = true;
+          $('.pin-prompt').fadeOut();
+        } else {
+          if (data.exhausted) {
+            alert('Error: too many attempts.  Restart server to retry.');
+          } else {
+            alert('Error: incorrect pin');
+          }
+        }
+        console.log(data);
+      },
+      error: function() {
+        btn.disabled = false;
+        alert('Error: Could not verify PIN.  Network error?');
+      }
+    });
+  });
+}
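Editor's note: initPinBox() is the client half of pin_auth() from werkzeug/debug/__init__.py earlier in this diff. On a correct pin the server sets an httponly cookie of the form "<timestamp>|<hash_pin(pin)>", and check_pin_trust() later accepts it only while the hash still matches the current pin and the timestamp is younger than PIN_TIME. Below is a minimal sketch of that server-side check; hash_pin() and PIN_TIME are illustrative stand-ins, since their real definitions live elsewhere in werkzeug.debug and are not part of this hunk.

# Editor's sketch (not part of the diff): mirrors the cookie check performed by
# DebuggedApplication.check_pin_trust().  hash_pin() and PIN_TIME are stand-ins.
import hashlib
import time

PIN_TIME = 60 * 60 * 24 * 7          # assumed validity window (one week)

def hash_pin(pin):
    # stand-in for werkzeug's hash_pin(); only the "compare a digest" idea matters
    return hashlib.md5(pin.encode('utf-8') + b'pinsalt').hexdigest()[:12]

def cookie_is_trusted(cookie_value, pin):
    """Return True/False/None the same way check_pin_trust() does."""
    if not cookie_value or '|' not in cookie_value:
        return False
    ts, pin_hash = cookie_value.split('|', 1)
    if not ts.isdigit():
        return False
    if pin_hash != hash_pin(pin):
        return None                   # cookie present but the pin has changed
    return (time.time() - PIN_TIME) < int(ts)

# A cookie written right after a successful pin_auth() would look like:
fresh = '%s|%s' % (int(time.time()), hash_pin('123-456-789'))
assert cookie_is_trusted(fresh, '123-456-789') is True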
+
+function promptForPin() {
+  if (!EVALEX_TRUSTED) {
+    $.ajax({
+      url: document.location.pathname,
+      data: {__debugger__: 'yes', cmd: 'printpin', s: SECRET}
+    });
+    $('.pin-prompt').fadeIn(function() {
+      $('.pin-prompt input[name="pin"]').focus();
+    });
+  }
+}
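Editor's note: promptForPin() only asks the server to print the pin; nothing secret comes back over HTTP. A sketch of the request it builds follows; the base URL and SECRET are placeholders, since SECRET is generated per debugger process and embedded in the rendered traceback page.

# Sketch only -- the printpin request promptForPin() fires.  The matching branch
# in DebuggedApplication.__call__ calls log_pin_request(), which logs
# " * Debugger pin code: ..." on the server console and returns an empty response.
from urllib.parse import urlencode

SECRET = 'replace-with-the-SECRET-from-the-page'  # placeholder
query = urlencode({'__debugger__': 'yes', 'cmd': 'printpin', 's': SECRET})
print('http://127.0.0.1:5000/?' + query)          # assumed dev-server address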
+
+
+/**
+ * Helper function for shell initialization
+ */
+function openShell(consoleNode, target, frameID) {
+  promptForPin();
+  if (consoleNode)
+    return consoleNode.slideToggle('fast');
+  consoleNode = $('<pre class="console">')
+    .appendTo(target.parent())
+    .hide()
+  var historyPos = 0, history = [''];
+  var output = $('<div class="output">[console ready]</div>')
+    .appendTo(consoleNode);
+  var form = $('<form>&gt;&gt;&gt; </form>')
+    .submit(function() {
+      var cmd = command.val();
+      $.get('', {
+          __debugger__: 'yes', cmd: cmd, frm: frameID, s: SECRET}, function(data) {
+        var tmp = $('<div>
').html(data); + $('span.extended', tmp).each(function() { + var hidden = $(this).wrap('').hide(); + hidden + .parent() + .append($('  ') + .click(function() { + hidden.toggle(); + $(this).toggleClass('open') + return false; + })); + }); + output.append(tmp); + command.focus(); + consoleNode.scrollTop(consoleNode.get(0).scrollHeight); + var old = history.pop(); + history.push(cmd); + if (typeof old != 'undefined') + history.push(old); + historyPos = history.length - 1; + }); + command.val(''); + return false; + }). + appendTo(consoleNode); + + var command = $('') + .appendTo(form) + .keydown(function(e) { + if (e.charCode == 100 && e.ctrlKey) { + output.text('--- screen cleared ---'); + return false; + } + else if (e.charCode == 0 && (e.keyCode == 38 || e.keyCode == 40)) { + if (e.keyCode == 38 && historyPos > 0) + historyPos--; + else if (e.keyCode == 40 && historyPos < history.length) + historyPos++; + command.val(history[historyPos]); + return false; + } + }); + + return consoleNode.slideDown('fast', function() { + command.focus(); + }); +} diff --git a/flask/venv/lib/python3.6/site-packages/werkzeug/debug/shared/jquery.js b/flask/venv/lib/python3.6/site-packages/werkzeug/debug/shared/jquery.js new file mode 100644 index 0000000..0f60b7b --- /dev/null +++ b/flask/venv/lib/python3.6/site-packages/werkzeug/debug/shared/jquery.js @@ -0,0 +1,5 @@ +/*! jQuery v1.11.3 | (c) 2005, 2015 jQuery Foundation, Inc. | jquery.org/license */ +!function(a,b){"object"==typeof module&&"object"==typeof module.exports?module.exports=a.document?b(a,!0):function(a){if(!a.document)throw new Error("jQuery requires a window with a document");return b(a)}:b(a)}("undefined"!=typeof window?window:this,function(a,b){var c=[],d=c.slice,e=c.concat,f=c.push,g=c.indexOf,h={},i=h.toString,j=h.hasOwnProperty,k={},l="1.11.3",m=function(a,b){return new m.fn.init(a,b)},n=/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g,o=/^-ms-/,p=/-([\da-z])/gi,q=function(a,b){return b.toUpperCase()};m.fn=m.prototype={jquery:l,constructor:m,selector:"",length:0,toArray:function(){return d.call(this)},get:function(a){return null!=a?0>a?this[a+this.length]:this[a]:d.call(this)},pushStack:function(a){var b=m.merge(this.constructor(),a);return b.prevObject=this,b.context=this.context,b},each:function(a,b){return m.each(this,a,b)},map:function(a){return this.pushStack(m.map(this,function(b,c){return a.call(b,c,b)}))},slice:function(){return this.pushStack(d.apply(this,arguments))},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},eq:function(a){var b=this.length,c=+a+(0>a?b:0);return this.pushStack(c>=0&&b>c?[this[c]]:[])},end:function(){return this.prevObject||this.constructor(null)},push:f,sort:c.sort,splice:c.splice},m.extend=m.fn.extend=function(){var a,b,c,d,e,f,g=arguments[0]||{},h=1,i=arguments.length,j=!1;for("boolean"==typeof g&&(j=g,g=arguments[h]||{},h++),"object"==typeof g||m.isFunction(g)||(g={}),h===i&&(g=this,h--);i>h;h++)if(null!=(e=arguments[h]))for(d in e)a=g[d],c=e[d],g!==c&&(j&&c&&(m.isPlainObject(c)||(b=m.isArray(c)))?(b?(b=!1,f=a&&m.isArray(a)?a:[]):f=a&&m.isPlainObject(a)?a:{},g[d]=m.extend(j,f,c)):void 0!==c&&(g[d]=c));return g},m.extend({expando:"jQuery"+(l+Math.random()).replace(/\D/g,""),isReady:!0,error:function(a){throw new Error(a)},noop:function(){},isFunction:function(a){return"function"===m.type(a)},isArray:Array.isArray||function(a){return"array"===m.type(a)},isWindow:function(a){return null!=a&&a==a.window},isNumeric:function(a){return!m.isArray(a)&&a-parseFloat(a)+1>=0},isEmptyObject:function(a){var 
b;for(b in a)return!1;return!0},isPlainObject:function(a){var b;if(!a||"object"!==m.type(a)||a.nodeType||m.isWindow(a))return!1;try{if(a.constructor&&!j.call(a,"constructor")&&!j.call(a.constructor.prototype,"isPrototypeOf"))return!1}catch(c){return!1}if(k.ownLast)for(b in a)return j.call(a,b);for(b in a);return void 0===b||j.call(a,b)},type:function(a){return null==a?a+"":"object"==typeof a||"function"==typeof a?h[i.call(a)]||"object":typeof a},globalEval:function(b){b&&m.trim(b)&&(a.execScript||function(b){a.eval.call(a,b)})(b)},camelCase:function(a){return a.replace(o,"ms-").replace(p,q)},nodeName:function(a,b){return a.nodeName&&a.nodeName.toLowerCase()===b.toLowerCase()},each:function(a,b,c){var d,e=0,f=a.length,g=r(a);if(c){if(g){for(;f>e;e++)if(d=b.apply(a[e],c),d===!1)break}else for(e in a)if(d=b.apply(a[e],c),d===!1)break}else if(g){for(;f>e;e++)if(d=b.call(a[e],e,a[e]),d===!1)break}else for(e in a)if(d=b.call(a[e],e,a[e]),d===!1)break;return a},trim:function(a){return null==a?"":(a+"").replace(n,"")},makeArray:function(a,b){var c=b||[];return null!=a&&(r(Object(a))?m.merge(c,"string"==typeof a?[a]:a):f.call(c,a)),c},inArray:function(a,b,c){var d;if(b){if(g)return g.call(b,a,c);for(d=b.length,c=c?0>c?Math.max(0,d+c):c:0;d>c;c++)if(c in b&&b[c]===a)return c}return-1},merge:function(a,b){var c=+b.length,d=0,e=a.length;while(c>d)a[e++]=b[d++];if(c!==c)while(void 0!==b[d])a[e++]=b[d++];return a.length=e,a},grep:function(a,b,c){for(var d,e=[],f=0,g=a.length,h=!c;g>f;f++)d=!b(a[f],f),d!==h&&e.push(a[f]);return e},map:function(a,b,c){var d,f=0,g=a.length,h=r(a),i=[];if(h)for(;g>f;f++)d=b(a[f],f,c),null!=d&&i.push(d);else for(f in a)d=b(a[f],f,c),null!=d&&i.push(d);return e.apply([],i)},guid:1,proxy:function(a,b){var c,e,f;return"string"==typeof b&&(f=a[b],b=a,a=f),m.isFunction(a)?(c=d.call(arguments,2),e=function(){return a.apply(b||this,c.concat(d.call(arguments)))},e.guid=a.guid=a.guid||m.guid++,e):void 0},now:function(){return+new Date},support:k}),m.each("Boolean Number String Function Array Date RegExp Object Error".split(" "),function(a,b){h["[object "+b+"]"]=b.toLowerCase()});function r(a){var b="length"in a&&a.length,c=m.type(a);return"function"===c||m.isWindow(a)?!1:1===a.nodeType&&b?!0:"array"===c||0===b||"number"==typeof b&&b>0&&b-1 in a}var s=function(a){var b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u="sizzle"+1*new Date,v=a.document,w=0,x=0,y=ha(),z=ha(),A=ha(),B=function(a,b){return a===b&&(l=!0),0},C=1<<31,D={}.hasOwnProperty,E=[],F=E.pop,G=E.push,H=E.push,I=E.slice,J=function(a,b){for(var c=0,d=a.length;d>c;c++)if(a[c]===b)return c;return-1},K="checked|selected|async|autofocus|autoplay|controls|defer|disabled|hidden|ismap|loop|multiple|open|readonly|required|scoped",L="[\\x20\\t\\r\\n\\f]",M="(?:\\\\.|[\\w-]|[^\\x00-\\xa0])+",N=M.replace("w","w#"),O="\\["+L+"*("+M+")(?:"+L+"*([*^$|!~]?=)"+L+"*(?:'((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\"|("+N+"))|)"+L+"*\\]",P=":("+M+")(?:\\((('((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\")|((?:\\\\.|[^\\\\()[\\]]|"+O+")*)|.*)\\)|)",Q=new RegExp(L+"+","g"),R=new RegExp("^"+L+"+|((?:^|[^\\\\])(?:\\\\.)*)"+L+"+$","g"),S=new RegExp("^"+L+"*,"+L+"*"),T=new RegExp("^"+L+"*([>+~]|"+L+")"+L+"*"),U=new RegExp("="+L+"*([^\\]'\"]*?)"+L+"*\\]","g"),V=new RegExp(P),W=new RegExp("^"+N+"$"),X={ID:new RegExp("^#("+M+")"),CLASS:new RegExp("^\\.("+M+")"),TAG:new RegExp("^("+M.replace("w","w*")+")"),ATTR:new RegExp("^"+O),PSEUDO:new RegExp("^"+P),CHILD:new 
RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+L+"*(even|odd|(([+-]|)(\\d*)n|)"+L+"*(?:([+-]|)"+L+"*(\\d+)|))"+L+"*\\)|)","i"),bool:new RegExp("^(?:"+K+")$","i"),needsContext:new RegExp("^"+L+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+L+"*((?:-\\d)?\\d*)"+L+"*\\)|)(?=[^-]|$)","i")},Y=/^(?:input|select|textarea|button)$/i,Z=/^h\d$/i,$=/^[^{]+\{\s*\[native \w/,_=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,aa=/[+~]/,ba=/'|\\/g,ca=new RegExp("\\\\([\\da-f]{1,6}"+L+"?|("+L+")|.)","ig"),da=function(a,b,c){var d="0x"+b-65536;return d!==d||c?b:0>d?String.fromCharCode(d+65536):String.fromCharCode(d>>10|55296,1023&d|56320)},ea=function(){m()};try{H.apply(E=I.call(v.childNodes),v.childNodes),E[v.childNodes.length].nodeType}catch(fa){H={apply:E.length?function(a,b){G.apply(a,I.call(b))}:function(a,b){var c=a.length,d=0;while(a[c++]=b[d++]);a.length=c-1}}}function ga(a,b,d,e){var f,h,j,k,l,o,r,s,w,x;if((b?b.ownerDocument||b:v)!==n&&m(b),b=b||n,d=d||[],k=b.nodeType,"string"!=typeof a||!a||1!==k&&9!==k&&11!==k)return d;if(!e&&p){if(11!==k&&(f=_.exec(a)))if(j=f[1]){if(9===k){if(h=b.getElementById(j),!h||!h.parentNode)return d;if(h.id===j)return d.push(h),d}else if(b.ownerDocument&&(h=b.ownerDocument.getElementById(j))&&t(b,h)&&h.id===j)return d.push(h),d}else{if(f[2])return H.apply(d,b.getElementsByTagName(a)),d;if((j=f[3])&&c.getElementsByClassName)return H.apply(d,b.getElementsByClassName(j)),d}if(c.qsa&&(!q||!q.test(a))){if(s=r=u,w=b,x=1!==k&&a,1===k&&"object"!==b.nodeName.toLowerCase()){o=g(a),(r=b.getAttribute("id"))?s=r.replace(ba,"\\$&"):b.setAttribute("id",s),s="[id='"+s+"'] ",l=o.length;while(l--)o[l]=s+ra(o[l]);w=aa.test(a)&&pa(b.parentNode)||b,x=o.join(",")}if(x)try{return H.apply(d,w.querySelectorAll(x)),d}catch(y){}finally{r||b.removeAttribute("id")}}}return i(a.replace(R,"$1"),b,d,e)}function ha(){var a=[];function b(c,e){return a.push(c+" ")>d.cacheLength&&delete b[a.shift()],b[c+" "]=e}return b}function ia(a){return a[u]=!0,a}function ja(a){var b=n.createElement("div");try{return!!a(b)}catch(c){return!1}finally{b.parentNode&&b.parentNode.removeChild(b),b=null}}function ka(a,b){var c=a.split("|"),e=a.length;while(e--)d.attrHandle[c[e]]=b}function la(a,b){var c=b&&a,d=c&&1===a.nodeType&&1===b.nodeType&&(~b.sourceIndex||C)-(~a.sourceIndex||C);if(d)return d;if(c)while(c=c.nextSibling)if(c===b)return-1;return a?1:-1}function ma(a){return function(b){var c=b.nodeName.toLowerCase();return"input"===c&&b.type===a}}function na(a){return function(b){var c=b.nodeName.toLowerCase();return("input"===c||"button"===c)&&b.type===a}}function oa(a){return ia(function(b){return b=+b,ia(function(c,d){var e,f=a([],c.length,b),g=f.length;while(g--)c[e=f[g]]&&(c[e]=!(d[e]=c[e]))})})}function pa(a){return a&&"undefined"!=typeof a.getElementsByTagName&&a}c=ga.support={},f=ga.isXML=function(a){var b=a&&(a.ownerDocument||a).documentElement;return b?"HTML"!==b.nodeName:!1},m=ga.setDocument=function(a){var b,e,g=a?a.ownerDocument||a:v;return g!==n&&9===g.nodeType&&g.documentElement?(n=g,o=g.documentElement,e=g.defaultView,e&&e!==e.top&&(e.addEventListener?e.addEventListener("unload",ea,!1):e.attachEvent&&e.attachEvent("onunload",ea)),p=!f(g),c.attributes=ja(function(a){return a.className="i",!a.getAttribute("className")}),c.getElementsByTagName=ja(function(a){return a.appendChild(g.createComment("")),!a.getElementsByTagName("*").length}),c.getElementsByClassName=$.test(g.getElementsByClassName),c.getById=ja(function(a){return 
o.appendChild(a).id=u,!g.getElementsByName||!g.getElementsByName(u).length}),c.getById?(d.find.ID=function(a,b){if("undefined"!=typeof b.getElementById&&p){var c=b.getElementById(a);return c&&c.parentNode?[c]:[]}},d.filter.ID=function(a){var b=a.replace(ca,da);return function(a){return a.getAttribute("id")===b}}):(delete d.find.ID,d.filter.ID=function(a){var b=a.replace(ca,da);return function(a){var c="undefined"!=typeof a.getAttributeNode&&a.getAttributeNode("id");return c&&c.value===b}}),d.find.TAG=c.getElementsByTagName?function(a,b){return"undefined"!=typeof b.getElementsByTagName?b.getElementsByTagName(a):c.qsa?b.querySelectorAll(a):void 0}:function(a,b){var c,d=[],e=0,f=b.getElementsByTagName(a);if("*"===a){while(c=f[e++])1===c.nodeType&&d.push(c);return d}return f},d.find.CLASS=c.getElementsByClassName&&function(a,b){return p?b.getElementsByClassName(a):void 0},r=[],q=[],(c.qsa=$.test(g.querySelectorAll))&&(ja(function(a){o.appendChild(a).innerHTML="",a.querySelectorAll("[msallowcapture^='']").length&&q.push("[*^$]="+L+"*(?:''|\"\")"),a.querySelectorAll("[selected]").length||q.push("\\["+L+"*(?:value|"+K+")"),a.querySelectorAll("[id~="+u+"-]").length||q.push("~="),a.querySelectorAll(":checked").length||q.push(":checked"),a.querySelectorAll("a#"+u+"+*").length||q.push(".#.+[+~]")}),ja(function(a){var b=g.createElement("input");b.setAttribute("type","hidden"),a.appendChild(b).setAttribute("name","D"),a.querySelectorAll("[name=d]").length&&q.push("name"+L+"*[*^$|!~]?="),a.querySelectorAll(":enabled").length||q.push(":enabled",":disabled"),a.querySelectorAll("*,:x"),q.push(",.*:")})),(c.matchesSelector=$.test(s=o.matches||o.webkitMatchesSelector||o.mozMatchesSelector||o.oMatchesSelector||o.msMatchesSelector))&&ja(function(a){c.disconnectedMatch=s.call(a,"div"),s.call(a,"[s!='']:x"),r.push("!=",P)}),q=q.length&&new RegExp(q.join("|")),r=r.length&&new RegExp(r.join("|")),b=$.test(o.compareDocumentPosition),t=b||$.test(o.contains)?function(a,b){var c=9===a.nodeType?a.documentElement:a,d=b&&b.parentNode;return a===d||!(!d||1!==d.nodeType||!(c.contains?c.contains(d):a.compareDocumentPosition&&16&a.compareDocumentPosition(d)))}:function(a,b){if(b)while(b=b.parentNode)if(b===a)return!0;return!1},B=b?function(a,b){if(a===b)return l=!0,0;var d=!a.compareDocumentPosition-!b.compareDocumentPosition;return d?d:(d=(a.ownerDocument||a)===(b.ownerDocument||b)?a.compareDocumentPosition(b):1,1&d||!c.sortDetached&&b.compareDocumentPosition(a)===d?a===g||a.ownerDocument===v&&t(v,a)?-1:b===g||b.ownerDocument===v&&t(v,b)?1:k?J(k,a)-J(k,b):0:4&d?-1:1)}:function(a,b){if(a===b)return l=!0,0;var c,d=0,e=a.parentNode,f=b.parentNode,h=[a],i=[b];if(!e||!f)return a===g?-1:b===g?1:e?-1:f?1:k?J(k,a)-J(k,b):0;if(e===f)return la(a,b);c=a;while(c=c.parentNode)h.unshift(c);c=b;while(c=c.parentNode)i.unshift(c);while(h[d]===i[d])d++;return d?la(h[d],i[d]):h[d]===v?-1:i[d]===v?1:0},g):n},ga.matches=function(a,b){return ga(a,null,null,b)},ga.matchesSelector=function(a,b){if((a.ownerDocument||a)!==n&&m(a),b=b.replace(U,"='$1']"),!(!c.matchesSelector||!p||r&&r.test(b)||q&&q.test(b)))try{var d=s.call(a,b);if(d||c.disconnectedMatch||a.document&&11!==a.document.nodeType)return d}catch(e){}return ga(b,n,null,[a]).length>0},ga.contains=function(a,b){return(a.ownerDocument||a)!==n&&m(a),t(a,b)},ga.attr=function(a,b){(a.ownerDocument||a)!==n&&m(a);var e=d.attrHandle[b.toLowerCase()],f=e&&D.call(d.attrHandle,b.toLowerCase())?e(a,b,!p):void 0;return void 
0!==f?f:c.attributes||!p?a.getAttribute(b):(f=a.getAttributeNode(b))&&f.specified?f.value:null},ga.error=function(a){throw new Error("Syntax error, unrecognized expression: "+a)},ga.uniqueSort=function(a){var b,d=[],e=0,f=0;if(l=!c.detectDuplicates,k=!c.sortStable&&a.slice(0),a.sort(B),l){while(b=a[f++])b===a[f]&&(e=d.push(f));while(e--)a.splice(d[e],1)}return k=null,a},e=ga.getText=function(a){var b,c="",d=0,f=a.nodeType;if(f){if(1===f||9===f||11===f){if("string"==typeof a.textContent)return a.textContent;for(a=a.firstChild;a;a=a.nextSibling)c+=e(a)}else if(3===f||4===f)return a.nodeValue}else while(b=a[d++])c+=e(b);return c},d=ga.selectors={cacheLength:50,createPseudo:ia,match:X,attrHandle:{},find:{},relative:{">":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(a){return a[1]=a[1].replace(ca,da),a[3]=(a[3]||a[4]||a[5]||"").replace(ca,da),"~="===a[2]&&(a[3]=" "+a[3]+" "),a.slice(0,4)},CHILD:function(a){return a[1]=a[1].toLowerCase(),"nth"===a[1].slice(0,3)?(a[3]||ga.error(a[0]),a[4]=+(a[4]?a[5]+(a[6]||1):2*("even"===a[3]||"odd"===a[3])),a[5]=+(a[7]+a[8]||"odd"===a[3])):a[3]&&ga.error(a[0]),a},PSEUDO:function(a){var b,c=!a[6]&&a[2];return X.CHILD.test(a[0])?null:(a[3]?a[2]=a[4]||a[5]||"":c&&V.test(c)&&(b=g(c,!0))&&(b=c.indexOf(")",c.length-b)-c.length)&&(a[0]=a[0].slice(0,b),a[2]=c.slice(0,b)),a.slice(0,3))}},filter:{TAG:function(a){var b=a.replace(ca,da).toLowerCase();return"*"===a?function(){return!0}:function(a){return a.nodeName&&a.nodeName.toLowerCase()===b}},CLASS:function(a){var b=y[a+" "];return b||(b=new RegExp("(^|"+L+")"+a+"("+L+"|$)"))&&y(a,function(a){return b.test("string"==typeof a.className&&a.className||"undefined"!=typeof a.getAttribute&&a.getAttribute("class")||"")})},ATTR:function(a,b,c){return function(d){var e=ga.attr(d,a);return null==e?"!="===b:b?(e+="","="===b?e===c:"!="===b?e!==c:"^="===b?c&&0===e.indexOf(c):"*="===b?c&&e.indexOf(c)>-1:"$="===b?c&&e.slice(-c.length)===c:"~="===b?(" "+e.replace(Q," ")+" ").indexOf(c)>-1:"|="===b?e===c||e.slice(0,c.length+1)===c+"-":!1):!0}},CHILD:function(a,b,c,d,e){var f="nth"!==a.slice(0,3),g="last"!==a.slice(-4),h="of-type"===b;return 1===d&&0===e?function(a){return!!a.parentNode}:function(b,c,i){var j,k,l,m,n,o,p=f!==g?"nextSibling":"previousSibling",q=b.parentNode,r=h&&b.nodeName.toLowerCase(),s=!i&&!h;if(q){if(f){while(p){l=b;while(l=l[p])if(h?l.nodeName.toLowerCase()===r:1===l.nodeType)return!1;o=p="only"===a&&!o&&"nextSibling"}return!0}if(o=[g?q.firstChild:q.lastChild],g&&s){k=q[u]||(q[u]={}),j=k[a]||[],n=j[0]===w&&j[1],m=j[0]===w&&j[2],l=n&&q.childNodes[n];while(l=++n&&l&&l[p]||(m=n=0)||o.pop())if(1===l.nodeType&&++m&&l===b){k[a]=[w,n,m];break}}else if(s&&(j=(b[u]||(b[u]={}))[a])&&j[0]===w)m=j[1];else while(l=++n&&l&&l[p]||(m=n=0)||o.pop())if((h?l.nodeName.toLowerCase()===r:1===l.nodeType)&&++m&&(s&&((l[u]||(l[u]={}))[a]=[w,m]),l===b))break;return m-=e,m===d||m%d===0&&m/d>=0}}},PSEUDO:function(a,b){var c,e=d.pseudos[a]||d.setFilters[a.toLowerCase()]||ga.error("unsupported pseudo: "+a);return e[u]?e(b):e.length>1?(c=[a,a,"",b],d.setFilters.hasOwnProperty(a.toLowerCase())?ia(function(a,c){var d,f=e(a,b),g=f.length;while(g--)d=J(a,f[g]),a[d]=!(c[d]=f[g])}):function(a){return e(a,0,c)}):e}},pseudos:{not:ia(function(a){var b=[],c=[],d=h(a.replace(R,"$1"));return d[u]?ia(function(a,b,c,e){var f,g=d(a,null,e,[]),h=a.length;while(h--)(f=g[h])&&(a[h]=!(b[h]=f))}):function(a,e,f){return 
b[0]=a,d(b,null,f,c),b[0]=null,!c.pop()}}),has:ia(function(a){return function(b){return ga(a,b).length>0}}),contains:ia(function(a){return a=a.replace(ca,da),function(b){return(b.textContent||b.innerText||e(b)).indexOf(a)>-1}}),lang:ia(function(a){return W.test(a||"")||ga.error("unsupported lang: "+a),a=a.replace(ca,da).toLowerCase(),function(b){var c;do if(c=p?b.lang:b.getAttribute("xml:lang")||b.getAttribute("lang"))return c=c.toLowerCase(),c===a||0===c.indexOf(a+"-");while((b=b.parentNode)&&1===b.nodeType);return!1}}),target:function(b){var c=a.location&&a.location.hash;return c&&c.slice(1)===b.id},root:function(a){return a===o},focus:function(a){return a===n.activeElement&&(!n.hasFocus||n.hasFocus())&&!!(a.type||a.href||~a.tabIndex)},enabled:function(a){return a.disabled===!1},disabled:function(a){return a.disabled===!0},checked:function(a){var b=a.nodeName.toLowerCase();return"input"===b&&!!a.checked||"option"===b&&!!a.selected},selected:function(a){return a.parentNode&&a.parentNode.selectedIndex,a.selected===!0},empty:function(a){for(a=a.firstChild;a;a=a.nextSibling)if(a.nodeType<6)return!1;return!0},parent:function(a){return!d.pseudos.empty(a)},header:function(a){return Z.test(a.nodeName)},input:function(a){return Y.test(a.nodeName)},button:function(a){var b=a.nodeName.toLowerCase();return"input"===b&&"button"===a.type||"button"===b},text:function(a){var b;return"input"===a.nodeName.toLowerCase()&&"text"===a.type&&(null==(b=a.getAttribute("type"))||"text"===b.toLowerCase())},first:oa(function(){return[0]}),last:oa(function(a,b){return[b-1]}),eq:oa(function(a,b,c){return[0>c?c+b:c]}),even:oa(function(a,b){for(var c=0;b>c;c+=2)a.push(c);return a}),odd:oa(function(a,b){for(var c=1;b>c;c+=2)a.push(c);return a}),lt:oa(function(a,b,c){for(var d=0>c?c+b:c;--d>=0;)a.push(d);return a}),gt:oa(function(a,b,c){for(var d=0>c?c+b:c;++db;b++)d+=a[b].value;return d}function sa(a,b,c){var d=b.dir,e=c&&"parentNode"===d,f=x++;return b.first?function(b,c,f){while(b=b[d])if(1===b.nodeType||e)return a(b,c,f)}:function(b,c,g){var h,i,j=[w,f];if(g){while(b=b[d])if((1===b.nodeType||e)&&a(b,c,g))return!0}else while(b=b[d])if(1===b.nodeType||e){if(i=b[u]||(b[u]={}),(h=i[d])&&h[0]===w&&h[1]===f)return j[2]=h[2];if(i[d]=j,j[2]=a(b,c,g))return!0}}}function ta(a){return a.length>1?function(b,c,d){var e=a.length;while(e--)if(!a[e](b,c,d))return!1;return!0}:a[0]}function ua(a,b,c){for(var d=0,e=b.length;e>d;d++)ga(a,b[d],c);return c}function va(a,b,c,d,e){for(var f,g=[],h=0,i=a.length,j=null!=b;i>h;h++)(f=a[h])&&(!c||c(f,d,e))&&(g.push(f),j&&b.push(h));return g}function wa(a,b,c,d,e,f){return d&&!d[u]&&(d=wa(d)),e&&!e[u]&&(e=wa(e,f)),ia(function(f,g,h,i){var j,k,l,m=[],n=[],o=g.length,p=f||ua(b||"*",h.nodeType?[h]:h,[]),q=!a||!f&&b?p:va(p,m,a,h,i),r=c?e||(f?a:o||d)?[]:g:q;if(c&&c(q,r,h,i),d){j=va(r,n),d(j,[],h,i),k=j.length;while(k--)(l=j[k])&&(r[n[k]]=!(q[n[k]]=l))}if(f){if(e||a){if(e){j=[],k=r.length;while(k--)(l=r[k])&&j.push(q[k]=l);e(null,r=[],j,i)}k=r.length;while(k--)(l=r[k])&&(j=e?J(f,l):m[k])>-1&&(f[j]=!(g[j]=l))}}else r=va(r===g?r.splice(o,r.length):r),e?e(null,g,r,i):H.apply(g,r)})}function xa(a){for(var b,c,e,f=a.length,g=d.relative[a[0].type],h=g||d.relative[" "],i=g?1:0,k=sa(function(a){return a===b},h,!0),l=sa(function(a){return J(b,a)>-1},h,!0),m=[function(a,c,d){var e=!g&&(d||c!==j)||((b=c).nodeType?k(a,c,d):l(a,c,d));return 
b=null,e}];f>i;i++)if(c=d.relative[a[i].type])m=[sa(ta(m),c)];else{if(c=d.filter[a[i].type].apply(null,a[i].matches),c[u]){for(e=++i;f>e;e++)if(d.relative[a[e].type])break;return wa(i>1&&ta(m),i>1&&ra(a.slice(0,i-1).concat({value:" "===a[i-2].type?"*":""})).replace(R,"$1"),c,e>i&&xa(a.slice(i,e)),f>e&&xa(a=a.slice(e)),f>e&&ra(a))}m.push(c)}return ta(m)}function ya(a,b){var c=b.length>0,e=a.length>0,f=function(f,g,h,i,k){var l,m,o,p=0,q="0",r=f&&[],s=[],t=j,u=f||e&&d.find.TAG("*",k),v=w+=null==t?1:Math.random()||.1,x=u.length;for(k&&(j=g!==n&&g);q!==x&&null!=(l=u[q]);q++){if(e&&l){m=0;while(o=a[m++])if(o(l,g,h)){i.push(l);break}k&&(w=v)}c&&((l=!o&&l)&&p--,f&&r.push(l))}if(p+=q,c&&q!==p){m=0;while(o=b[m++])o(r,s,g,h);if(f){if(p>0)while(q--)r[q]||s[q]||(s[q]=F.call(i));s=va(s)}H.apply(i,s),k&&!f&&s.length>0&&p+b.length>1&&ga.uniqueSort(i)}return k&&(w=v,j=t),r};return c?ia(f):f}return h=ga.compile=function(a,b){var c,d=[],e=[],f=A[a+" "];if(!f){b||(b=g(a)),c=b.length;while(c--)f=xa(b[c]),f[u]?d.push(f):e.push(f);f=A(a,ya(e,d)),f.selector=a}return f},i=ga.select=function(a,b,e,f){var i,j,k,l,m,n="function"==typeof a&&a,o=!f&&g(a=n.selector||a);if(e=e||[],1===o.length){if(j=o[0]=o[0].slice(0),j.length>2&&"ID"===(k=j[0]).type&&c.getById&&9===b.nodeType&&p&&d.relative[j[1].type]){if(b=(d.find.ID(k.matches[0].replace(ca,da),b)||[])[0],!b)return e;n&&(b=b.parentNode),a=a.slice(j.shift().value.length)}i=X.needsContext.test(a)?0:j.length;while(i--){if(k=j[i],d.relative[l=k.type])break;if((m=d.find[l])&&(f=m(k.matches[0].replace(ca,da),aa.test(j[0].type)&&pa(b.parentNode)||b))){if(j.splice(i,1),a=f.length&&ra(j),!a)return H.apply(e,f),e;break}}}return(n||h(a,o))(f,b,!p,e,aa.test(a)&&pa(b.parentNode)||b),e},c.sortStable=u.split("").sort(B).join("")===u,c.detectDuplicates=!!l,m(),c.sortDetached=ja(function(a){return 1&a.compareDocumentPosition(n.createElement("div"))}),ja(function(a){return a.innerHTML="","#"===a.firstChild.getAttribute("href")})||ka("type|href|height|width",function(a,b,c){return c?void 0:a.getAttribute(b,"type"===b.toLowerCase()?1:2)}),c.attributes&&ja(function(a){return a.innerHTML="",a.firstChild.setAttribute("value",""),""===a.firstChild.getAttribute("value")})||ka("value",function(a,b,c){return c||"input"!==a.nodeName.toLowerCase()?void 0:a.defaultValue}),ja(function(a){return null==a.getAttribute("disabled")})||ka(K,function(a,b,c){var d;return c?void 0:a[b]===!0?b.toLowerCase():(d=a.getAttributeNode(b))&&d.specified?d.value:null}),ga}(a);m.find=s,m.expr=s.selectors,m.expr[":"]=m.expr.pseudos,m.unique=s.uniqueSort,m.text=s.getText,m.isXMLDoc=s.isXML,m.contains=s.contains;var t=m.expr.match.needsContext,u=/^<(\w+)\s*\/?>(?:<\/\1>|)$/,v=/^.[^:#\[\.,]*$/;function w(a,b,c){if(m.isFunction(b))return m.grep(a,function(a,d){return!!b.call(a,d,a)!==c});if(b.nodeType)return m.grep(a,function(a){return a===b!==c});if("string"==typeof b){if(v.test(b))return m.filter(b,a,c);b=m.filter(b,a)}return m.grep(a,function(a){return m.inArray(a,b)>=0!==c})}m.filter=function(a,b,c){var d=b[0];return c&&(a=":not("+a+")"),1===b.length&&1===d.nodeType?m.find.matchesSelector(d,a)?[d]:[]:m.find.matches(a,m.grep(b,function(a){return 1===a.nodeType}))},m.fn.extend({find:function(a){var b,c=[],d=this,e=d.length;if("string"!=typeof a)return this.pushStack(m(a).filter(function(){for(b=0;e>b;b++)if(m.contains(d[b],this))return!0}));for(b=0;e>b;b++)m.find(a,d[b],c);return c=this.pushStack(e>1?m.unique(c):c),c.selector=this.selector?this.selector+" "+a:a,c},filter:function(a){return 
this.pushStack(w(this,a||[],!1))},not:function(a){return this.pushStack(w(this,a||[],!0))},is:function(a){return!!w(this,"string"==typeof a&&t.test(a)?m(a):a||[],!1).length}});var x,y=a.document,z=/^(?:\s*(<[\w\W]+>)[^>]*|#([\w-]*))$/,A=m.fn.init=function(a,b){var c,d;if(!a)return this;if("string"==typeof a){if(c="<"===a.charAt(0)&&">"===a.charAt(a.length-1)&&a.length>=3?[null,a,null]:z.exec(a),!c||!c[1]&&b)return!b||b.jquery?(b||x).find(a):this.constructor(b).find(a);if(c[1]){if(b=b instanceof m?b[0]:b,m.merge(this,m.parseHTML(c[1],b&&b.nodeType?b.ownerDocument||b:y,!0)),u.test(c[1])&&m.isPlainObject(b))for(c in b)m.isFunction(this[c])?this[c](b[c]):this.attr(c,b[c]);return this}if(d=y.getElementById(c[2]),d&&d.parentNode){if(d.id!==c[2])return x.find(a);this.length=1,this[0]=d}return this.context=y,this.selector=a,this}return a.nodeType?(this.context=this[0]=a,this.length=1,this):m.isFunction(a)?"undefined"!=typeof x.ready?x.ready(a):a(m):(void 0!==a.selector&&(this.selector=a.selector,this.context=a.context),m.makeArray(a,this))};A.prototype=m.fn,x=m(y);var B=/^(?:parents|prev(?:Until|All))/,C={children:!0,contents:!0,next:!0,prev:!0};m.extend({dir:function(a,b,c){var d=[],e=a[b];while(e&&9!==e.nodeType&&(void 0===c||1!==e.nodeType||!m(e).is(c)))1===e.nodeType&&d.push(e),e=e[b];return d},sibling:function(a,b){for(var c=[];a;a=a.nextSibling)1===a.nodeType&&a!==b&&c.push(a);return c}}),m.fn.extend({has:function(a){var b,c=m(a,this),d=c.length;return this.filter(function(){for(b=0;d>b;b++)if(m.contains(this,c[b]))return!0})},closest:function(a,b){for(var c,d=0,e=this.length,f=[],g=t.test(a)||"string"!=typeof a?m(a,b||this.context):0;e>d;d++)for(c=this[d];c&&c!==b;c=c.parentNode)if(c.nodeType<11&&(g?g.index(c)>-1:1===c.nodeType&&m.find.matchesSelector(c,a))){f.push(c);break}return this.pushStack(f.length>1?m.unique(f):f)},index:function(a){return a?"string"==typeof a?m.inArray(this[0],m(a)):m.inArray(a.jquery?a[0]:a,this):this[0]&&this[0].parentNode?this.first().prevAll().length:-1},add:function(a,b){return this.pushStack(m.unique(m.merge(this.get(),m(a,b))))},addBack:function(a){return this.add(null==a?this.prevObject:this.prevObject.filter(a))}});function D(a,b){do a=a[b];while(a&&1!==a.nodeType);return a}m.each({parent:function(a){var b=a.parentNode;return b&&11!==b.nodeType?b:null},parents:function(a){return m.dir(a,"parentNode")},parentsUntil:function(a,b,c){return m.dir(a,"parentNode",c)},next:function(a){return D(a,"nextSibling")},prev:function(a){return D(a,"previousSibling")},nextAll:function(a){return m.dir(a,"nextSibling")},prevAll:function(a){return m.dir(a,"previousSibling")},nextUntil:function(a,b,c){return m.dir(a,"nextSibling",c)},prevUntil:function(a,b,c){return m.dir(a,"previousSibling",c)},siblings:function(a){return m.sibling((a.parentNode||{}).firstChild,a)},children:function(a){return m.sibling(a.firstChild)},contents:function(a){return m.nodeName(a,"iframe")?a.contentDocument||a.contentWindow.document:m.merge([],a.childNodes)}},function(a,b){m.fn[a]=function(c,d){var e=m.map(this,b,c);return"Until"!==a.slice(-5)&&(d=c),d&&"string"==typeof d&&(e=m.filter(d,e)),this.length>1&&(C[a]||(e=m.unique(e)),B.test(a)&&(e=e.reverse())),this.pushStack(e)}});var E=/\S+/g,F={};function G(a){var b=F[a]={};return m.each(a.match(E)||[],function(a,c){b[c]=!0}),b}m.Callbacks=function(a){a="string"==typeof a?F[a]||G(a):m.extend({},a);var 
[vendored minified jQuery (1.x, oldIE-compatible build) continues: Callbacks/Deferred, ready, data, queue, event, and DOM-manipulation modules — minified third-party library code, HTML string literals lost in extraction and not reproduced here]